diff --git a/.harness/python311_genai_default_pr_input.yaml b/.harness/python311_genai_default_pr_input.yaml deleted file mode 100644 index 6a982cb01..000000000 --- a/.harness/python311_genai_default_pr_input.yaml +++ /dev/null @@ -1,33 +0,0 @@ -inputSet: - name: python311_genai_default_pr_input - tags: {} - identifier: python311_genai_default_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: test_functional_by_framework - properties: - ci: - codebase: - build: - type: PR - spec: - number: <+trigger.prNumber> - stages: - - stage: - identifier: Build_image_because_of_change - type: Pipeline - spec: - inputs: - identifier: env_image_publish - properties: - ci: - codebase: - build: - type: PR - spec: - number: <+trigger.prNumber> - variables: - - name: framework - type: String - value: python311_genai diff --git a/.harness/python311_genai_image_build_default_pr_input.yaml b/.harness/python311_genai_image_build_default_pr_input.yaml deleted file mode 100644 index 801756771..000000000 --- a/.harness/python311_genai_image_build_default_pr_input.yaml +++ /dev/null @@ -1,24 +0,0 @@ -inputSet: - name: python311_genai_image_build_default_pr_input - identifier: python311_genai_image_build_default_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: env_image_publish - properties: - ci: - codebase: - build: - type: branch - spec: - branch: <+trigger.branch> - variables: - - name: env_folder - type: String - value: public_dropin_environments - - name: env_name - type: String - value: python311_genai - - name: image_tag - type: String - value: <+pipeline.variables.env_folder>_<+pipeline.variables.env_name>_latest diff --git a/.harness/python311_genai_local_default_pr_input.yaml b/.harness/python311_genai_local_default_pr_input.yaml deleted file mode 100644 index 841c5faf2..000000000 --- a/.harness/python311_genai_local_default_pr_input.yaml +++ /dev/null @@ -1,21 +0,0 @@ -inputSet: - name: python311_genai_local_default_pr_input - identifier: python311_genai_local_default_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: test_functional_by_framework - properties: - ci: - codebase: - build: - type: PR - spec: - number: <+trigger.prNumber> - variables: - - name: framework - type: String - value: python311_genai - - name: use_local_dockerfile - type: String - value: "true" diff --git a/.harness/python311_genai_local_image_build_default_pr_input.yaml b/.harness/python311_genai_local_image_build_default_pr_input.yaml deleted file mode 100644 index a2da5a555..000000000 --- a/.harness/python311_genai_local_image_build_default_pr_input.yaml +++ /dev/null @@ -1,27 +0,0 @@ -inputSet: - name: python311_genai_local_image_build_default_pr_input - identifier: python311_genai_local_image_build_default_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: env_image_publish - properties: - ci: - codebase: - build: - type: branch - spec: - branch: <+trigger.branch> - variables: - - name: env_folder - type: String - value: public_dropin_environments - - name: env_name - type: String - value: python311_genai - - name: image_tag - type: String - value: <+pipeline.variables.env_folder>_<+pipeline.variables.env_name>_latest - - name: use_local_dockerfile - type: String - value: "true" diff --git a/.harness/python311_genai_local_on_pr.yaml b/.harness/python311_genai_local_on_pr.yaml deleted file mode 100644 
index da81366d1..000000000 --- a/.harness/python311_genai_local_on_pr.yaml +++ /dev/null @@ -1,33 +0,0 @@ -trigger: - name: python311_genai local on pr - identifier: python311_genai_local_on_pr - enabled: true - stagesToExecute: [] - description: "" - tags: {} - encryptedWebhookSecretIdentifier: "" - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipelineIdentifier: test_functional_by_framework - source: - type: Webhook - spec: - type: Github - spec: - type: PullRequest - spec: - connectorRef: account.svc_harness_git1 - autoAbortPreviousExecutions: false - payloadConditions: - - key: targetBranch - operator: Equals - value: master - headerConditions: [] - repoName: datarobot-user-models - actions: - - Open - - Reopen - - Synchronize - pipelineBranchName: <+trigger.branch> - inputSetRefs: - - python311_genai_local_default_pr_input diff --git a/.harness/python311_genai_local_on_push_master.yaml b/.harness/python311_genai_local_on_push_master.yaml deleted file mode 100644 index 82b0e6dbb..000000000 --- a/.harness/python311_genai_local_on_push_master.yaml +++ /dev/null @@ -1,33 +0,0 @@ -trigger: - name: python311_genai_local_on_push - identifier: python311_genai_local_on_push_master - enabled: true - stagesToExecute: [] - description: "" - tags: {} - encryptedWebhookSecretIdentifier: "" - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipelineIdentifier: env_image_publish - source: - type: Webhook - spec: - type: Github - spec: - type: Push - spec: - connectorRef: account.svc_harness_git1 - autoAbortPreviousExecutions: false - payloadConditions: - - key: changedFiles - operator: Contains - value: public_dropin_environments/python311_genai - - key: targetBranch - operator: Equals - value: master - headerConditions: [] - repoName: datarobot-user-models - actions: [] - pipelineBranchName: <+trigger.branch> - inputSetRefs: - - python311_genai_local_image_build_default_pr_input diff --git a/.harness/python311_genai_publish_to_docker_pr_input.yaml b/.harness/python311_genai_publish_to_docker_pr_input.yaml deleted file mode 100644 index b43e46b85..000000000 --- a/.harness/python311_genai_publish_to_docker_pr_input.yaml +++ /dev/null @@ -1,27 +0,0 @@ -inputSet: - name: python311_genai_publish_to_docker_pr_input - identifier: python311_genai_publish_to_docker_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: publish_to_docker - properties: - ci: - codebase: - build: - type: branch - spec: - branch: <+trigger.branch> - variables: - - name: env_folder - type: String - value: public_dropin_environments - - name: env_name - type: String - value: python311_genai - - name: repo_name - type: String - value: python-genai - - name: target_branch - type: String - value: <+trigger.branch> diff --git a/.harness/python311_genai_reconcile_dependencies_default_pr_input.yaml b/.harness/python311_genai_reconcile_dependencies_default_pr_input.yaml deleted file mode 100644 index f0ed176c0..000000000 --- a/.harness/python311_genai_reconcile_dependencies_default_pr_input.yaml +++ /dev/null @@ -1,24 +0,0 @@ -inputSet: - name: python311_genai_reconcile_dependencies_default_pr_input - identifier: python311_genai_reconcile_dependencies_default_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: reconcile_dependencies - properties: - ci: - codebase: - build: - type: branch - spec: - branch: <+trigger.branch> - variables: - - name: env_folder - type: String - value: 
public_dropin_environments - - name: env_name - type: String - value: python311_genai - - name: image_uri - type: String - value: datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev diff --git a/.harness/python311_genai_trivy_scan_pr_input.yaml b/.harness/python311_genai_trivy_scan_pr_input.yaml deleted file mode 100644 index 0911e2fd8..000000000 --- a/.harness/python311_genai_trivy_scan_pr_input.yaml +++ /dev/null @@ -1,11 +0,0 @@ -inputSet: - name: python311_genai_trivy_scan_pr_input - identifier: python311_genai_trivy_scan_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: trivy_scan - variables: - - name: imageName - type: String - value: env-python-genai diff --git a/.harness/python311_genai_update_env_version_pr_input.yaml b/.harness/python311_genai_update_env_version_pr_input.yaml deleted file mode 100644 index 8e4689db5..000000000 --- a/.harness/python311_genai_update_env_version_pr_input.yaml +++ /dev/null @@ -1,21 +0,0 @@ -inputSet: - name: python311_genai_update_env_version_pr_input - identifier: python311_genai_update_env_version_pr_input - orgIdentifier: Custom_Models - projectIdentifier: datarobotusermodels - pipeline: - identifier: update_env_version - properties: - ci: - codebase: - build: - type: PR - spec: - number: <+trigger.prNumber> - variables: - - name: env_dir - type: String - value: public_dropin_environments - - name: env_name - type: String - value: python311_genai diff --git a/.harness/reusable_build_images.yaml b/.harness/reusable_build_images.yaml index 7e95fb346..ffae7a277 100644 --- a/.harness/reusable_build_images.yaml +++ b/.harness/reusable_build_images.yaml @@ -1,6 +1,6 @@ pipeline: name: WIP REUSABLE PIPELINE to build images - identifier: WIP_build_images + identifier: envs_image_build projectIdentifier: datarobotusermodels orgIdentifier: Custom_Models tags: {} @@ -100,7 +100,7 @@ pipeline: condition: <+pipeline.variables.envs_folders>!="" - stage: name: build images - identifier: build_images + identifier: build_img description: "" type: CI spec: @@ -136,7 +136,7 @@ pipeline: strategy: matrix: image: <+json.list("images", <+pipeline.stages.get_changes_and_output_images_build_matrix.spec.execution.steps.Build_params_matrix.output.outputVariables.matrix_json>)> - nodeName: <+matrix.image.repository>:<+matrix.image.tag> + nodeName: <+strategy.iteration>-<+matrix.image.repository>:<+matrix.image.tag> description: |- This pipeline can be used in other repositories to: * detect which environments have changed. 
Environment is a folder with env_info.json diff --git a/.harness/test_functional_by_framework_multisteps.yaml b/.harness/test_functional_by_framework_multisteps.yaml index 8829f24c7..00dc48e29 100644 --- a/.harness/test_functional_by_framework_multisteps.yaml +++ b/.harness/test_functional_by_framework_multisteps.yaml @@ -8,7 +8,7 @@ pipeline: identifier: Reconcile_envVersionIds_requirements template: templateRef: org.Execution_Environments_Reconcile_Stage - versionLabel: v1 + versionLabel: v2 templateInputs: type: CI variables: @@ -34,10 +34,10 @@ pipeline: type: Pipeline spec: org: Custom_Models - pipeline: WIP_build_images + pipeline: envs_image_build project: datarobotusermodels inputs: - identifier: WIP_build_images + identifier: envs_image_build variables: - name: repo type: String @@ -254,3 +254,12 @@ pipeline: description: Whether to force all requirements update to the latest versions required: false value: <+input>.default(false).allowedValues(true,false) + properties: + ci: + codebase: + connectorRef: account.svc_harness_git1 + repoName: <+pipeline.variables.target_repo> + build: + type: branch + spec: + branch: <+pipeline.variables.source_branch> diff --git a/.harness/test_functional_general.yaml b/.harness/test_functional_general.yaml index 927f1efda..3c71bd06a 100644 --- a/.harness/test_functional_general.yaml +++ b/.harness/test_functional_general.yaml @@ -27,7 +27,7 @@ pipeline: shell: Bash command: |- ./harness_scripts/functional_general/mlops_reporting_entrypoint.sh \ - <+secrets.getValue("account.dockerhubdatarobotread2orgread1")> + <+secrets.getValue("org.genai-systems-dockerhub-login")> <+secrets.getValue("org.genai-systems-dockerhub-token")> resources: limits: memory: 3G @@ -92,7 +92,7 @@ pipeline: shell: Bash command: | ./harness_scripts/functional_general/general_tests_entrypoint.sh \ - <+secrets.getValue("account.dockerhubdatarobotread2orgread1")> + <+secrets.getValue("org.genai-systems-dockerhub-login")> <+secrets.getValue("org.genai-systems-dockerhub-token")> resources: limits: memory: 8G diff --git a/DRCODEOWNERS b/DRCODEOWNERS index 019f30d27..a104815b6 100644 --- a/DRCODEOWNERS +++ b/DRCODEOWNERS @@ -32,7 +32,6 @@ /model_templates/proxy_model_datarobot @datarobot/genai-systems /model_templates/triton_onnx_unstructured @datarobot/genai-systems /public_dropin_environments/java_codegen @datarobot/genai-systems -/public_dropin_environments/python311_genai @datarobot/buzok /public_dropin_environments/python311_genai_agents @datarobot/buzok /public_dropin_environments/python3_keras @datarobot/genai-systems @datarobot/core-modeling /public_dropin_environments/python3_onnx @datarobot/genai-systems diff --git a/harness_scripts/functional_general/general_tests_entrypoint.sh b/harness_scripts/functional_general/general_tests_entrypoint.sh index d41cd55a8..fb531fde4 100755 --- a/harness_scripts/functional_general/general_tests_entrypoint.sh +++ b/harness_scripts/functional_general/general_tests_entrypoint.sh @@ -3,11 +3,12 @@ script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" . ${script_dir}/../common/common.sh -DOCKER_HUB_SECRET=$1 +DOCKER_HUB_USERNAME=$1 +DOCKER_HUB_SECRET=$2 if [ -n "$HARNESS_BUILD_ID" ]; then title "Running within a Harness pipeline." 
[ -z $DOCKER_HUB_SECRET ] && echo "Docker HUB secret is expected as an input argument" && exit 1 - docker login -u datarobotread2 -p $DOCKER_HUB_SECRET || { echo "Docker login failed"; exit 1; } + docker login -u $DOCKER_HUB_USERNAME -p $DOCKER_HUB_SECRET || { echo "Docker login failed"; exit 1; } fi title "Build image for tests" diff --git a/harness_scripts/functional_general/mlops_reporting_entrypoint.sh b/harness_scripts/functional_general/mlops_reporting_entrypoint.sh index 0fcd0055e..9f4107230 100755 --- a/harness_scripts/functional_general/mlops_reporting_entrypoint.sh +++ b/harness_scripts/functional_general/mlops_reporting_entrypoint.sh @@ -4,11 +4,12 @@ script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" . ${script_dir}/../common/common.sh . ${script_dir}/../../tools/create-and-source-venv.sh -DOCKER_HUB_SECRET=$1 +DOCKER_HUB_USERNAME=$1 +DOCKER_HUB_SECRET=$2 if [ -n "$HARNESS_BUILD_ID" ]; then title "Running within a Harness pipeline." [ -z $DOCKER_HUB_SECRET ] && echo "Docker HUB secret is expected as an input argument" && exit 1 - docker login -u datarobotread2 -p $DOCKER_HUB_SECRET || { echo "Docker login failed"; exit 1; } + docker login -u $DOCKER_HUB_USERNAME -p $DOCKER_HUB_SECRET || { echo "Docker login failed"; exit 1; } fi title "Preparing to test" diff --git a/jenkins/test_functional_per_framework.sh b/jenkins/test_functional_per_framework.sh index 152d97274..0dd580eef 100755 --- a/jenkins/test_functional_per_framework.sh +++ b/jenkins/test_functional_per_framework.sh @@ -25,8 +25,6 @@ elif [ "$1" = "python3_pmml" ]; then DOCKER_IMAGE="python3_pmml" elif [ "$1" = "python3_pytorch" ]; then DOCKER_IMAGE="python3_pytorch" -elif [ "$1" = "python311_genai" ]; then - DOCKER_IMAGE="python311_genai" elif [ "$1" = "python3_sklearn" ]; then DOCKER_IMAGE="python3_sklearn" elif [ "$1" = "python3_xgboost" ]; then diff --git a/pipelineController.yaml b/pipelineController.yaml index 6e9f55bbd..a6ecc0903 100644 --- a/pipelineController.yaml +++ b/pipelineController.yaml @@ -66,13 +66,6 @@ test_functional_python_pytorch: &test_functional_python_pytorch FRAMEWORK: python3_pytorch phase: 3 -test_functional_python311_genai: &test_functional_python311_genai - taskName: test_functional_python311_genai - definition: jenkins/test_functional_per_framework.groovy - environment: - FRAMEWORK: python311_genai - phase: 3 - test_functional_python_sklearn: &test_functional_python_sklearn taskName: test_functional_python_sklearn definition: jenkins/test_functional_per_framework.groovy @@ -155,10 +148,6 @@ repositoryTasks: script: - *build_drum - *test_functional_python_pytorch - - regex: ".*test_functional_python311_genai.*" - script: - - *build_drum - - *test_functional_python311_genai - regex: ".*test_functional_python_sklearn.*" script: - *build_drum @@ -194,7 +183,6 @@ repositoryTasks: - *test_functional_python_onnx - *test_functional_python_pmml - *test_functional_python_pytorch - - *test_functional_python311_genai - *test_functional_python_sklearn - *test_functional_python_xgboost - *test_functional_r_environment diff --git a/public_dropin_apps_environments/python312_apps/Dockerfile b/public_dropin_apps_environments/python312_apps/Dockerfile index a4f1ecb86..49276b451 100644 --- a/public_dropin_apps_environments/python312_apps/Dockerfile +++ b/public_dropin_apps_environments/python312_apps/Dockerfile @@ -1,8 +1,14 @@ -FROM python:3.12-slim +FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.12-dev # This makes print statements show up in the logs API ENV PYTHONUNBUFFERED=1 +# 
This allows code to access ~ and not default to /, which may not be accessible +ENV HOME=/opt/code + +# Add .local/bin to PATH for user-installed packages (gunicorn, flask, etc.) +ENV PATH=/opt/code/.local/bin:$PATH + WORKDIR /opt/code EXPOSE 8080 diff --git a/public_dropin_apps_environments/python312_apps/env_info.json b/public_dropin_apps_environments/python312_apps/env_info.json index e9ce0661e..472d32362 100644 --- a/public_dropin_apps_environments/python312_apps/env_info.json +++ b/public_dropin_apps_environments/python312_apps/env_info.json @@ -3,11 +3,15 @@ "name": "[DataRobot] Python 3.12 Applications Base", "description": "Base image for Python 3.12 custom applications.", "programmingLanguage": "python", - "environmentVersionId": "672340d90513a1231033a268", + "label": "", + "environmentVersionId": "687e42bed30a56120d62f274", "environmentVersionDescription": "Python 3.12 without any additional packages.", "isPublic": true, + "isDownloadable": true, "useCases": [ "customApplication" ], - "imageRepository": "env-apps-python312-apps" + "contextUrl": null, + "imageRepository": "env-apps-python312-apps", + "tags": [] } diff --git a/public_dropin_environments/java_codegen/Dockerfile b/public_dropin_environments/java_codegen/Dockerfile index 936560f85..6c6e60b2c 100644 --- a/public_dropin_environments/java_codegen/Dockerfile +++ b/public_dropin_environments/java_codegen/Dockerfile @@ -1,6 +1,6 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. -ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build USER root @@ -10,7 +10,7 @@ RUN apk add --no-cache openjdk-11 # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. 
-FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/java_codegen/env_info.json b/public_dropin_environments/java_codegen/env_info.json index 663be05d5..c50054b0f 100644 --- a/public_dropin_environments/java_codegen/env_info.json +++ b/public_dropin_environments/java_codegen/env_info.json @@ -4,17 +4,18 @@ "description": "This template can be used as an environment for DataRobot generated scoring code or models that implement the either the IClassificationPredictor or IRegressionPredictor interface from the datarobot-prediction package and for H2O models exported as POJO or MOJO.", "programmingLanguage": "java", "label": "", - "environmentVersionId": "6848b5262081a81707b57882", + "environmentVersionId": "6890c005003d7564bd002d27", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/java_codegen", "imageRepository": "env-java-codegen", "tags": [ - "v11.1.0-6848b5262081a81707b57882", - "6848b5262081a81707b57882", - "v11.1.0-latest" + "v11.2.0-6890c005003d7564bd002d27", + "6890c005003d7564bd002d27", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/java_codegen/requirements.txt b/public_dropin_environments/java_codegen/requirements.txt index e4a8906e1..cfa78e780 100644 --- a/public_dropin_environments/java_codegen/requirements.txt +++ b/public_dropin_environments/java_codegen/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -59,10 +59,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -74,7 +74,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -89,7 +89,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_environments/python311/Dockerfile b/public_dropin_environments/python311/Dockerfile index 3f5730fc6..5e0ab8862 100644 --- a/public_dropin_environments/python311/Dockerfile +++ b/public_dropin_environments/python311/Dockerfile @@ -1,11 +1,11 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. -ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. 
-FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python311/env_info.json b/public_dropin_environments/python311/env_info.json index d458bff79..277db919e 100644 --- a/public_dropin_environments/python311/env_info.json +++ b/public_dropin_environments/python311/env_info.json @@ -1,20 +1,21 @@ { "id": "67a554baeade3a4ce2ab6700", - "name": "[DataRobot] Python 3.11 Drop-In", + "name": "[DataRobot] Python 3.11 Drop-In - Test reconcile", "description": "This template environment can be used to create Python based custom models. User is responsible to provide requirements.txt with the model, to install all the required dependencies.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b5342081a8172fb52002", + "environmentVersionId": "6890f12983913312084bab7c", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python311", "imageRepository": "env-python", "tags": [ - "v11.1.0-6848b5342081a8172fb52002", - "6848b5342081a8172fb52002", - "v11.1.0-latest" + "v11.2.0-6890f12983913312084bab7c", + "6890f12983913312084bab7c", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python311/requirements.txt b/public_dropin_environments/python311/requirements.txt index e4a8906e1..cfa78e780 100644 --- a/public_dropin_environments/python311/requirements.txt +++ b/public_dropin_environments/python311/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -59,10 +59,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -74,7 +74,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -89,7 +89,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_environments/python311_genai/Dockerfile b/public_dropin_environments/python311_genai/Dockerfile deleted file mode 100644 index 3f5730fc6..000000000 --- a/public_dropin_environments/python311_genai/Dockerfile +++ /dev/null @@ -1,67 +0,0 @@ -# This is a private chain-guard development image that is stored in DataRobot's private registry. -# Replace it with your own development chain-gaurd image if you build your own. -ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev -FROM ${BASE_ROOT_IMAGE} AS build - -# This is a private production chain-guard image that is stored in DataRobot's private registry. -# Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 - -USER root - -# Most of the binaries below are just symlinks to busybox and some OCI build tools follow -# symlinks (Docker buildkit) and some do not (Kaniko) so copy this in to be safe. 
-COPY --from=build /usr/bin/busybox /usr/bin/busybox - -# Required to run the entrypoint script -COPY --from=build /bin/sh /bin/sh - -# Required to change the ownership of copied files into the managed-image -COPY --from=build /bin/chown /bin/chown - -# Required to change the ownership for Custom Models PPS -COPY --from=build /bin/chgrp /bin/chgrp - -# Required to change the permissions of the 'start_server.sh' that is copied into the managed-image -COPY --from=build /bin/chmod /bin/chmod - -# Required for DR backend to create /opt/code (Maybe required by applications other than custom-models) -COPY --from=build /bin/mkdir /bin/mkdir - -# Required for custom-models to install dependencies -COPY --from=build /usr/bin/pip /usr/bin/pip - -# Cleanup '__pycache__' directories. It solves an AsymmetricPrivateKey scanning error. -COPY --from=build /usr/bin/rm /usr/bin/rm -COPY --from=build /usr/bin/find /usr/bin/find - -# Just for convenience -COPY --from=build /bin/ls /bin/ls - -COPY requirements.txt requirements.txt - -ENV VIRTUAL_ENV=/opt/venv - -RUN sh -c "python -m venv ${VIRTUAL_ENV} && \ - . ${VIRTUAL_ENV}/bin/activate && \ - python -m ensurepip --default-pip && \ - python -m pip install --upgrade pip && \ - python -m pip install --no-cache-dir -r requirements.txt && \ - find ${VIRTUAL_ENV} -type d -name '__pycache__' -exec rm -rf {} +" - -ENV PATH=${VIRTUAL_ENV}/bin:${PATH} -ENV HOME=/opt -ENV CODE_DIR=/opt/code -ENV ADDRESS=0.0.0.0:8080 - -# MARK: FUNCTIONAL-TEST-ADD-HERE. (This line is used by DRUM functional test automation and can be safely ignored.) - -# This makes print statements show up in the logs API -ENV WITH_ERROR_SERVER=1 \ - PYTHONUNBUFFERED=1 - - -COPY ./*.sh ${CODE_DIR}/ -WORKDIR ${CODE_DIR} - -ENTRYPOINT ["sh", "-c", "exec ${CODE_DIR}/start_server.sh \"$@\"", "--"] diff --git a/public_dropin_environments/python311_genai/Dockerfile.local b/public_dropin_environments/python311_genai/Dockerfile.local deleted file mode 100644 index dd2508543..000000000 --- a/public_dropin_environments/python311_genai/Dockerfile.local +++ /dev/null @@ -1,28 +0,0 @@ -# This Dockerfile.local can built with the following command: -# > docker build -f Dockerfile.local -t python311-genai-local-dev . -# where the 'python311-genai-local-dev' is the name of the image that will be created. - -FROM python:3.11-slim-bookworm - -ENV VIRTUAL_ENV=/opt/venv - -COPY requirements.txt requirements.txt - -RUN sh -c "python -m venv ${VIRTUAL_ENV} && \ - . ${VIRTUAL_ENV}/bin/activate && \ - python -m pip install --upgrade pip && \ - python -m pip install --no-cache-dir -r requirements.txt" - -ENV PATH=${VIRTUAL_ENV}/bin:${PATH} -ENV HOME=/opt -ENV CODE_DIR=/opt/code -ENV ADDRESS=0.0.0.0:8080 - -# This makes print statements show up in the logs API -ENV WITH_ERROR_SERVER=1 \ - PYTHONUNBUFFERED=1 - -COPY ./*.sh ${CODE_DIR}/ -WORKDIR ${CODE_DIR} - -ENTRYPOINT ["sh", "-c", "exec ${CODE_DIR}/start_server.sh \"$@\"", "--"] diff --git a/public_dropin_environments/python311_genai/README.md b/public_dropin_environments/python311_genai/README.md deleted file mode 100644 index 6093163af..000000000 --- a/public_dropin_environments/python311_genai/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Python 3 GenAI Drop-In Template Environment - -This template environment can be used to create GenAI-powered custom models and includes common dependencies for workflows using OpenAI, Langchain, vector DBs, or transformers in PyTorch. 
- -## Supported Libraries - -This environment is built for python 3 and has support for the following libraries. -For specific version information and the complete list of included packages, see [requirements](requirements.txt). - -- openai -- langchain -- transformers -- sentence-transformers -- torch -- faiss-cpu -- numpy -- pandas -- scikit-learn -- optimum -- onnx -- onnxruntime -- google-cloud-aiplatform -- tiktoken - -## Instructions - -1. From the terminal, run `tar -czvf py_dropin.tar.gz -C /path/to/public_dropin_environments/python3_genai/ .` -2. Using either the API or from the UI create a new Custom Environment with the tarball created -in step 1. - -### Creating models for this environment - -To use this environment, your custom model archive will typically contain a `custom.py` file containing the necessary hooks, as well as other files needed for your workflow. You can implement the hook functions such as `load_model` and `score_unstructured`, as documented [here](../../custom_model_runner/README.md) - -Within your `custom.py` code, by importing the necessary dependencies found in this environment, you can implement your Python code under the related custom hook functions, to build your GenAI workflows. - -If you need additional dependencies, you can add those packages in your `requirements.txt` file that you include within your custom model archive and DataRobot will make them available to your custom Python code after you build the environment. - - diff --git a/public_dropin_environments/python311_genai/__init__.py b/public_dropin_environments/python311_genai/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/public_dropin_environments/python311_genai/env_info.json b/public_dropin_environments/python311_genai/env_info.json deleted file mode 100644 index 6a8f7ecfa..000000000 --- a/public_dropin_environments/python311_genai/env_info.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "id": "64d2ba178dd3f0b1fa2162f0", - "name": "[DataRobot] Python 3.11 GenAI", - "description": "This template environment can be used to create GenAI-powered custom models and includes common dependencies for workflows using OpenAI, Langchain, vector DBs, or transformers in PyTorch. Similar to other drop-in environments, you can either include a .pth artifact or any other code needed to deserialize your model, and optionally a custom.py file.", - "programmingLanguage": "python", - "label": "", - "environmentVersionId": "6852ca656041b19b9f55fce0", - "environmentVersionDescription": "", - "isPublic": true, - "useCases": [ - "customModel" - ], - "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python311_genai", - "imageRepository": "env-python-genai", - "tags": [ - "v11.1.0-6852ca656041b19b9f55fce0", - "6852ca656041b19b9f55fce0", - "v11.1.0-latest" - ] -} diff --git a/public_dropin_environments/python311_genai/fit.sh b/public_dropin_environments/python311_genai/fit.sh deleted file mode 100755 index 69673babc..000000000 --- a/public_dropin_environments/python311_genai/fit.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/sh -# Copyright 2025 DataRobot, Inc. and its affiliates. -# -# All rights reserved. -# This is proprietary source code of DataRobot, Inc. and its affiliates. -# -# Released under the terms of DataRobot Tool and Utility Agreement. 
-# You probably don't want to modify this file -cd "${CODEPATH}" || exit 1 -export PYTHONPATH="${CODEPATH}":"${PYTHONPATH}" - -export X="${INPUT_DIRECTORY}/X${TRAINING_DATA_EXTENSION:-.csv}" -export weights="${INPUT_DIRECTORY}/weights.csv" -export sparse_colnames="${INPUT_DIRECTORY}/X.colnames" -export parameters="${INPUT_DIRECTORY}/parameters.json" - -CMD="drum fit --target-type ${TARGET_TYPE} --input ${X} --num-rows ALL --output ${ARTIFACT_DIRECTORY} \ ---code-dir ${CODEPATH} --verbose --enable-fit-metadata " - -if [ "${TARGET_TYPE}" != "anomaly" ]; then - CMD="${CMD} --target-csv ${INPUT_DIRECTORY}/y.csv" -fi - -if [ -f "${weights}" ]; then - CMD="${CMD} --row-weights-csv ${weights}" -fi - -if [ -f "${sparse_colnames}" ]; then - CMD="${CMD} --sparse-column-file ${sparse_colnames}" -fi - -if [ -f "${parameters}" ]; then - CMD="${CMD} --parameter-file ${parameters}" -fi - -if [ -n "${USER_SECRETS_MOUNT_PATH}" ]; then - CMD="${CMD} --user-secrets-mount-path ${USER_SECRETS_MOUNT_PATH}" -fi - -echo "${CMD}" -sh -c "${CMD}" diff --git a/public_dropin_environments/python311_genai/requirements.in b/public_dropin_environments/python311_genai/requirements.in deleted file mode 100644 index ce76122d0..000000000 --- a/public_dropin_environments/python311_genai/requirements.in +++ /dev/null @@ -1,32 +0,0 @@ -datarobot-drum - -cloudpickle -torch -transformers -openai -langchain -optimum -onnxruntime -onnx -sentence-transformers -faiss-cpu -numpy -pandas -scikit-learn -scipy -tiktoken -google-cloud-aiplatform -aws-request-signer -pydantic -pydantic-settings -aiofiles -aioboto3 -rouge-score -fugashi -unidic-lite -llama-index -llama-index-llms-azure-openai -distro -# next version of httpx, 0.28.0, has a bug that breaks opanai library, which is pinned -# https://github.com/openai/openai-python/issues/1902 -httpx==0.27.2 diff --git a/public_dropin_environments/python311_genai/requirements.txt b/public_dropin_environments/python311_genai/requirements.txt deleted file mode 100644 index bfa1cfd82..000000000 --- a/public_dropin_environments/python311_genai/requirements.txt +++ /dev/null @@ -1,210 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --index-url=https://pypi.org/simple --no-annotate --no-emit-index-url --no-emit-trusted-host --output-file=requirements.txt requirements.in -# -absl-py==2.1.0 -aioboto3==13.4.0 -aiobotocore[boto3]==2.18.0 -aiofiles==24.1.0 -aiohappyeyeballs==2.4.6 -aiohttp==3.11.12 -aioitertools==0.12.0 -aiosignal==1.3.2 -annotated-types==0.7.0 -anyio==4.8.0 -argcomplete==3.5.3 -async-timeout==4.0.3 -attrs==25.1.0 -aws-request-signer==1.2.0 -azure-core==1.32.0 -azure-identity==1.20.0 -azure-storage-blob==12.19.0 -beautifulsoup4==4.13.3 -blinker==1.9.0 -boto3==1.36.1 -botocore==1.36.1 -cachetools==4.2.4 -certifi==2025.1.31 -cffi==1.17.1 -charset-normalizer==3.4.1 -click==8.1.8 -cloudpickle==3.1.1 -coloredlogs==15.0.1 -cryptography==44.0.1 -dataclasses-json==0.6.7 -datarobot==3.6.2 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 -datarobot-storage==0.0.0 -deprecated==1.2.18 -dirtyjson==1.0.8 -distro==1.9.0 -docker==7.1.0 -exceptiongroup==1.3.0 -faiss-cpu==1.10.0 -filechunkio==1.8 -filelock==3.17.0 -filetype==1.2.0 -flask==3.1.1 -flatbuffers==25.2.10 -frozenlist==1.5.0 -fsspec==2025.2.0 -fugashi==1.4.0 -google-api-core[grpc]==1.34.0 -google-auth==1.28.1 -google-cloud-aiplatform==1.41.0 -google-cloud-bigquery==3.18.0 -google-cloud-core==2.4.1 -google-cloud-resource-manager==1.12.0 -google-cloud-storage==1.43.0 
-google-crc32c==1.6.0 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.67.0 -greenlet==3.1.1 -grpc-google-iam-v1==0.14.0 -grpcio==1.70.0 -grpcio-status==1.48.2 -h11==0.14.0 -httpcore==1.0.7 -httpx==0.27.2 -huggingface-hub==0.28.1 -humanfriendly==10.0 -idna==3.10 -importlib-metadata==8.4.0 -isodate==0.7.2 -itsdangerous==2.2.0 -jinja2==3.1.6 -jiter==0.8.2 -jmespath==1.0.1 -joblib==1.4.2 -jsonpatch==1.33 -jsonpointer==3.0.0 -julia==0.5.7 -langchain==0.3.18 -langchain-core==0.3.35 -langchain-text-splitters==0.3.6 -langsmith==0.3.8 -llama-cloud==0.1.12 -llama-cloud-services==0.6.1 -llama-index==0.12.17 -llama-index-agent-openai==0.4.5 -llama-index-cli==0.4.0 -llama-index-core==0.12.17 -llama-index-embeddings-openai==0.3.1 -llama-index-indices-managed-llama-cloud==0.6.4 -llama-index-llms-azure-openai==0.3.0 -llama-index-llms-openai==0.3.19 -llama-index-multi-modal-llms-openai==0.4.3 -llama-index-program-openai==0.3.1 -llama-index-question-gen-openai==0.3.0 -llama-index-readers-file==0.4.5 -llama-index-readers-llama-parse==0.4.0 -llama-parse==0.6.1 -markupsafe==3.0.2 -marshmallow==3.26.1 -memory-profiler==0.61.0 -mpmath==1.3.0 -msal==1.31.1 -msal-extensions==1.2.0 -multidict==6.1.0 -mypy-extensions==1.0.0 -nest-asyncio==1.6.0 -networkx==3.4.2 -nltk==3.9.1 -numpy==1.26.4 -nvidia-cublas-cu12==12.4.5.8 -nvidia-cuda-cupti-cu12==12.4.127 -nvidia-cuda-nvrtc-cu12==12.4.127 -nvidia-cuda-runtime-cu12==12.4.127 -nvidia-cudnn-cu12==9.1.0.70 -nvidia-cufft-cu12==11.2.1.3 -nvidia-curand-cu12==10.3.5.147 -nvidia-cusolver-cu12==11.6.1.9 -nvidia-cusparse-cu12==12.3.1.170 -nvidia-cusparselt-cu12==0.6.2 -nvidia-nccl-cu12==2.21.5 -nvidia-nvjitlink-cu12==12.4.127 -nvidia-nvtx-cu12==12.4.127 -onnx==1.17.0 -onnxruntime==1.20.1 -openai==1.62.0 -opentelemetry-api==1.27.0 -opentelemetry-exporter-otlp-proto-common==1.27.0 -opentelemetry-exporter-otlp-proto-http==1.27.0 -opentelemetry-instrumentation==0.48b0 -opentelemetry-instrumentation-aiohttp-client==0.48b0 -opentelemetry-instrumentation-requests==0.48b0 -opentelemetry-proto==1.27.0 -opentelemetry-sdk==1.27.0 -opentelemetry-semantic-conventions==0.48b0 -opentelemetry-util-http==0.48b0 -optimum==1.24.0 -orjson==3.10.15 -packaging==24.2 -pandas==2.2.3 -pillow==11.1.0 -portalocker==2.10.1 -progress==1.6 -propcache==0.2.1 -proto-plus==1.26.0 -protobuf==3.20.3 -psutil==6.1.1 -py4j==0.10.9.9 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pycparser==2.22 -pydantic==2.10.6 -pydantic-core==2.27.2 -pydantic-settings==2.7.1 -pyjwt[crypto]==2.10.1 -pypdf==5.3.0 -python-dateutil==2.9.0.post0 -python-dotenv==1.0.1 -pytz==2025.1 -pyyaml==6.0.2 -regex==2024.11.6 -requests==2.32.3 -requests-toolbelt==1.0.0 -rouge-score==0.1.2 -rsa==4.9 -ruamel-yaml==0.17.4 -s3transfer==0.11.2 -safetensors==0.5.2 -scikit-learn==1.6.1 -scipy==1.15.1 -sentence-transformers==3.4.1 -shapely==2.0.7 -six==1.17.0 -sniffio==1.3.1 -soupsieve==2.6 -sqlalchemy[asyncio]==2.0.38 -strenum==0.4.15 -strictyaml==1.4.2 -striprtf==0.0.26 -sympy==1.13.1 -tenacity==9.0.0 -termcolor==2.5.0 -texttable==1.7.0 -threadpoolctl==3.5.0 -tiktoken==0.8.0 -tokenizers==0.21.0 -torch==2.6.0 -tqdm==4.67.1 -trafaret==2.1.1 -transformers==4.48.3 -triton==3.2.0 -typing-extensions==4.12.2 -typing-inspect==0.9.0 -tzdata==2025.1 -unidic-lite==1.0.8 -urllib3==1.26.20 -werkzeug==3.1.3 -wrapt==1.17.2 -yarl==1.18.3 -zipp==3.21.0 -zstandard==0.23.0 - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/public_dropin_environments/python311_genai/start_server.sh 
b/public_dropin_environments/python311_genai/start_server.sh deleted file mode 100755 index 1bb32cd0d..000000000 --- a/public_dropin_environments/python311_genai/start_server.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh -# Copyright 2025 DataRobot, Inc. and its affiliates. -# -# All rights reserved. -# This is proprietary source code of DataRobot, Inc. and its affiliates. -# -# Released under the terms of DataRobot Tool and Utility Agreement. -echo "Starting Custom Model environment with DRUM prediction server" - -if [ "${ENABLE_CUSTOM_MODEL_RUNTIME_ENV_DUMP}" = 1 ]; then - echo "Environment variables:" - env -fi - -echo -echo "Executing command: drum server $*" -echo -exec drum server "$@" diff --git a/public_dropin_environments/python311_genai_agents/Dockerfile b/public_dropin_environments/python311_genai_agents/Dockerfile index b9e415708..dc741352b 100644 --- a/public_dropin_environments/python311_genai_agents/Dockerfile +++ b/public_dropin_environments/python311_genai_agents/Dockerfile @@ -14,7 +14,7 @@ ARG UNAME=notebooks ARG UID=10101 ARG GID=10101 -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev AS base +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev AS base ARG UNAME ARG UID @@ -88,19 +88,18 @@ EXPOSE 22 FROM base AS builder # this stage has only bare minimal of dependencies installed to optimize build time for the local development +ENV ANNOY_COMPILER_ARGS="-D_CRT_SECURE_NO_WARNINGS,-DANNOYLIB_MULTITHREADED_BUILD,-march=x86-64" + ARG WORKDIR ARG VENV_PATH COPY ./requirements.txt ${WORKDIR}/ COPY ./agent/requirements-agent.txt ${WORKDIR}/ -# hadolint ignore=DL3013 +# hadolint ignore=DL3013, SC1091 RUN source ${VENV_PATH}/bin/activate && \ uv pip install --no-cache-dir -r ${WORKDIR}/requirements.txt && rm ${WORKDIR}/requirements.txt && \ - uv pip install --no-cache-dir -r ${WORKDIR}/requirements-agent.txt && rm ${WORKDIR}/requirements-agent.txt && \ - # Generative AI Agents - uv pip uninstall litellm && \ - uv pip install --no-cache-dir litellm>=1.72.1 + uv pip install --no-cache-dir -r ${WORKDIR}/requirements-agent.txt && rm ${WORKDIR}/requirements-agent.txt # Copy agent runtime into work directory COPY ./run_agent.py ${WORKDIR}/ diff --git a/public_dropin_environments/python311_genai_agents/Dockerfile.local b/public_dropin_environments/python311_genai_agents/Dockerfile.local index 535f3c23e..2990363cf 100644 --- a/public_dropin_environments/python311_genai_agents/Dockerfile.local +++ b/public_dropin_environments/python311_genai_agents/Dockerfile.local @@ -128,19 +128,18 @@ EXPOSE 22 FROM base AS builder # this stage has only bare minimal of dependencies installed to optimize build time for the local development +ENV ANNOY_COMPILER_ARGS="-D_CRT_SECURE_NO_WARNINGS,-DANNOYLIB_MULTITHREADED_BUILD,-march=x86-64" + ARG WORKDIR ARG VENV_PATH COPY ./requirements.txt ${WORKDIR}/ COPY ./agent/requirements-agent.txt ${WORKDIR}/ -# hadolint ignore=SC1091 +# hadolint ignore=DL3013, SC1091 RUN source ${VENV_PATH}/bin/activate && \ uv pip install --no-cache-dir -r ${WORKDIR}/requirements.txt && rm ${WORKDIR}/requirements.txt && \ - uv pip install --no-cache-dir -r ${WORKDIR}/requirements-agent.txt && rm ${WORKDIR}/requirements-agent.txt && \ - # Generative AI Agents - uv pip uninstall litellm && \ - uv pip install --no-cache-dir litellm>=1.72.1 + uv pip install --no-cache-dir -r ${WORKDIR}/requirements-agent.txt && rm ${WORKDIR}/requirements-agent.txt # Copy agent runtime into work directory COPY ./run_agent.py ${WORKDIR}/ diff --git 
a/public_dropin_environments/python311_genai_agents/env_info.json b/public_dropin_environments/python311_genai_agents/env_info.json index 517e2caca..379d74391 100644 --- a/public_dropin_environments/python311_genai_agents/env_info.json +++ b/public_dropin_environments/python311_genai_agents/env_info.json @@ -4,9 +4,10 @@ "description": "This template environment can be used to create GenAI-powered agents using CrewAI, LangGraph, or Llama-Index. Similar to other drop-in environments, you can either include a .pth artifact or any other code needed to deserialize your model, and optionally a custom.py file. You can also use this environment in codespaces.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "68595fa68a68ab0fab92e623", + "environmentVersionId": "6890c0050071ec646f00124b", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "notebook", "customModel" @@ -14,8 +15,8 @@ "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python311_genai_agents", "imageRepository": "env-python-genai-agents", "tags": [ - "v11.1.0-68595fa68a68ab0fab92e623", - "68595fa68a68ab0fab92e623", - "v11.1.0-latest" + "v11.2.0-6890c0050071ec646f00124b", + "6890c0050071ec646f00124b", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python311_genai_agents/requirements.in b/public_dropin_environments/python311_genai_agents/requirements.in index 3351ebf93..2f95fb17a 100644 --- a/public_dropin_environments/python311_genai_agents/requirements.in +++ b/public_dropin_environments/python311_genai_agents/requirements.in @@ -8,36 +8,51 @@ ipykernel<6.29.0 pandas numpy mistune -datarobot-drum>=1.16.16 -datarobot numpy uwsgi + +# CVE Updates flask>=3.1.1 requests>=2.32.4 protobuf>=5.29.5,<6.0.0 +starlette>=0.40.0 +pillow>=11.3.0 +urllib3>=2.5.0 -# Generative AI Frameworks -crewai -crewai-tools -langgraph -langchain -langchain-community -llama-index -llama-index-llms-openai -llama-index-llms-langchain -llama-index-llms-litellm -openai +# GenAI Environment +click~=8.1.8 +crewai>=0.140.0 +crewai-tools>=0.48.0 +datarobot-drum>=1.16.19 +datarobot-moderations~=11.1.23 +datarobot-mlops>=11.1.0 +datarobot>=3.7.0 +dotenv~=0.9.9 +langchain-community~=0.3.23 +langchain~=0.3.23 +langgraph~=0.4.10 +langgraph-prebuilt~=0.2.3 +legacy-cgi~=2.6.3 +litellm>=1.72.1 +llama-index-core>=0.12.49 +llama-index-llms-langchain~=0.6.1 +llama-index-llms-litellm~=0.4.1 +llama-index-llms-openai~=0.4.7 +llama-index~=0.12.50 +multidict~=6.5.0 +onnxruntime~=1.22.0 +openai~=1.97.0 +opentelemetry-api~=1.33.0 +opentelemetry-instrumentation-aiohttp-client~=0.54b0 +opentelemetry-instrumentation-crewai~=0.40.5 +opentelemetry-instrumentation-httpx~=0.54b0 +opentelemetry-instrumentation-langchain~=0.40.5 +opentelemetry-instrumentation-llamaindex~=0.40.5 +opentelemetry-instrumentation-openai~=0.40.5 +opentelemetry-instrumentation-requests~=0.54b0 +opentelemetry-sdk~=1.33.0 +python-dotenv~=1.1.0 ragas @ git+https://github.com/explodinggradients/ragas@5d59549ad5ef511f621502c563bc55ac5aeb9188#subdirectory=ragas -datarobot-moderations>=11.1.17 - -# Generative AI Telemetry -traceloop-sdk -opentelemetry-api -opentelemetry-sdk -opentelemetry-instrumentation-aiohttp-client -opentelemetry-instrumentation-crewai -opentelemetry-instrumentation-httpx -opentelemetry-instrumentation-langchain -opentelemetry-instrumentation-llamaindex -opentelemetry-instrumentation-openai -opentelemetry-instrumentation-requests +requests~=2.32.4 +traceloop-sdk~=0.40.2 
+uvicorn~=0.35.0 diff --git a/public_dropin_environments/python311_genai_agents/requirements.txt b/public_dropin_environments/python311_genai_agents/requirements.txt index 3259923d6..489a31359 100644 --- a/public_dropin_environments/python311_genai_agents/requirements.txt +++ b/public_dropin_environments/python311_genai_agents/requirements.txt @@ -4,69 +4,68 @@ # # pip-compile --index-url=https://pypi.org/simple --no-annotate --no-emit-index-url --no-emit-trusted-host --output-file=requirements.txt requirements.in # -absl-py==2.3.0 -aioboto3==14.3.0 -aiobotocore[boto3]==2.22.0 +absl-py==2.3.1 +aioboto3==15.0.0 +aiobotocore[boto3]==2.23.0 aiofiles==24.1.0 aiohappyeyeballs==2.6.1 -aiohttp==3.11.18 +aiohttp==3.12.14 aioitertools==0.12.0 -aiosignal==1.3.2 +aiosignal==1.4.0 aiosqlite==0.21.0 -alembic==1.15.2 +alembic==1.16.4 annotated-types==0.7.0 annoy==1.17.3 anthropic==0.49.0 anyio==4.9.0 appdirs==1.4.4 argcomplete==3.6.2 -argon2-cffi==23.1.0 +argon2-cffi==25.1.0 argon2-cffi-bindings==21.2.0 arrow==1.3.0 -asgiref==3.8.1 +asgiref==3.9.1 asttokens==3.0.0 async-timeout==4.0.3 -asyncio==3.4.3 attrs==25.3.0 -auth0-python==4.9.0 -azure-core==1.34.0 -azure-identity==1.22.0 +azure-core==1.35.0 +azure-identity==1.23.1 azure-storage-blob==12.19.0 backoff==2.2.1 +backports-asyncio-runner==1.2.0 backports-tarfile==1.2.0 -banks==2.1.2 +banks==2.2.0 bcrypt==4.3.0 beautifulsoup4==4.13.4 black==25.1.0 bleach[css]==6.2.0 blinker==1.9.0 -boto3==1.37.3 -botocore==1.37.3 +boto3==1.38.27 +botocore==1.38.27 build==1.2.2.post1 cachetools==5.5.2 -certifi==2025.4.26 +certifi==2025.7.14 cffi==1.17.1 charset-normalizer==3.4.2 chroma-hnswlib==0.7.6 chromadb==0.5.23 click==8.1.8 -cohere==5.15.0 +cohere==5.16.1 colorama==0.4.6 coloredlogs==15.0.1 comm==0.2.2 -coverage==7.8.2 -crewai==0.119.0 -crewai-tools==0.44.0 -cryptography==44.0.3 +coverage==7.9.2 +crewai==0.148.0 +crewai-tools==0.55.0 +cryptography==45.0.5 dataclasses-json==0.6.7 -datarobot==3.7.1 -datarobot-drum==1.16.16 -datarobot-mlops==11.1.0a3 -datarobot-moderations==11.1.17 +datarobot==3.8.2 +datarobot-drum==1.16.20 +datarobot-mlops==11.1.0 +datarobot-moderations==11.1.23 datarobot-predict==1.13.2 datarobot-storage==2.2.0 -datasets==3.6.0 -debugpy==1.8.14 +datasets==4.0.0 +debugpy==1.8.15 decorator==5.2.1 deepeval==2.7.9 defusedxml==0.7.1 @@ -77,17 +76,18 @@ dirtyjson==1.0.8 diskcache==5.6.3 distro==1.9.0 docker==7.1.0 -docstring-parser==0.16 +docstring-parser==0.17.0 docutils==0.21.2 -durationpy==0.9 +dotenv==0.9.9 +durationpy==0.10 ecs-logging==2.2.0 embedchain==0.1.128 et-xmlfile==2.0.0 exceptiongroup==1.3.0 execnet==2.1.1 executing==2.2.0 -fastapi==0.115.12 -fastavro==1.10.0 +fastapi==0.116.1 +fastavro==1.11.1 fastembed==0.6.0 fastjsonschema==2.21.1 filechunkio==1.8 @@ -96,34 +96,34 @@ filetype==1.2.0 flask==3.1.1 flatbuffers==25.2.10 fqdn==1.5.1 -frozenlist==1.6.0 +frozenlist==1.7.0 fsspec[http]==2025.3.0 -google-api-core[grpc]==2.24.2 -google-auth==2.40.1 -google-cloud-aiplatform==1.96.0 -google-cloud-bigquery==3.34.0 +google-api-core[grpc]==2.25.1 +google-auth==2.40.3 +google-cloud-aiplatform==1.104.0 +google-cloud-bigquery==3.35.0 google-cloud-core==2.4.3 google-cloud-resource-manager==1.14.2 google-cloud-storage==2.19.0 google-crc32c==1.7.1 -google-genai==1.19.0 +google-genai==1.26.0 google-resumable-media==2.7.2 googleapis-common-protos[grpc]==1.70.0 gptcache==0.1.44 -greenlet==3.2.2 -griffe==1.7.3 +greenlet==3.2.3 +griffe==1.8.0 grpc-google-iam-v1==0.14.2 -grpcio==1.71.0 -grpcio-status==1.71.0 +grpcio==1.73.1 +grpcio-status==1.71.2 h11==0.16.0 
h2==4.2.0 -hf-xet==1.1.0 +hf-xet==1.1.5 hpack==4.1.0 httpcore==1.0.9 httptools==0.6.4 httpx[http2]==0.28.1 httpx-sse==0.4.0 -huggingface-hub==0.31.1 +huggingface-hub==0.33.4 humanfriendly==10.0 hyperframe==6.1.0 idna==3.10 @@ -131,7 +131,7 @@ importlib-metadata==8.6.1 importlib-resources==6.5.2 inflection==0.5.1 iniconfig==2.1.0 -instructor==1.8.1 +instructor==1.10.0 ipykernel==6.28.0 ipython==8.37.0 isodate==0.7.2 @@ -139,69 +139,71 @@ isoduration==20.11.0 itsdangerous==2.2.0 jaraco-classes==3.4.0 jaraco-context==6.0.1 -jaraco-functools==4.1.0 +jaraco-functools==4.2.1 jedi==0.19.2 jeepney==0.9.0 jinja2==3.1.6 -jiter==0.8.2 +jiter==0.10.0 jmespath==1.0.1 -joblib==1.5.0 -json-repair==0.44.1 +joblib==1.5.1 +json-repair==0.25.2 json5==0.12.0 jsonpatch==1.33 -jsonpickle==4.0.5 +jsonpickle==4.1.1 jsonpointer==3.0.0 jsonref==1.1.0 -jsonschema[format-nongpl]==4.23.0 +jsonschema[format-nongpl]==4.25.0 jsonschema-specifications==2025.4.1 julia==0.5.7 jupyter-client==8.6.3 jupyter-core==5.8.1 jupyter-events==0.12.0 jupyter-kernel-gateway==3.0.1 -jupyter-server==2.15.0 +jupyter-server==2.16.0 jupyter-server-terminals==0.5.3 jupyterlab-pygments==0.3.0 keyring==25.6.0 -kubernetes==32.0.1 -lancedb==0.22.0 -langchain==0.3.25 +kubernetes==33.1.0 +lancedb==0.24.1 +langchain==0.3.26 langchain-cohere==0.3.5 -langchain-community==0.3.23 -langchain-core==0.3.59 +langchain-community==0.3.27 +langchain-core==0.3.71 langchain-experimental==0.3.4 -langchain-nvidia-ai-endpoints==0.3.10 +langchain-nvidia-ai-endpoints==0.3.12 langchain-openai==0.2.14 langchain-text-splitters==0.3.8 -langgraph==0.4.3 -langgraph-checkpoint==2.0.25 -langgraph-prebuilt==0.1.8 -langgraph-sdk==0.1.66 -langsmith==0.3.42 +langgraph==0.4.10 +langgraph-checkpoint==2.1.1 +langgraph-prebuilt==0.2.3 +langgraph-sdk==0.2.0 +langsmith==0.3.45 lark==1.2.2 -litellm==1.68.0 -llama-cloud==0.1.21 -llama-cloud-services==0.6.15 -llama-index==0.12.35 -llama-index-agent-openai==0.4.7 -llama-index-cli==0.4.1 -llama-index-core==0.12.40 -llama-index-embeddings-azure-openai==0.3.7 +legacy-cgi==2.6.3 +litellm==1.72.6 +llama-cloud==0.1.32 +llama-cloud-services==0.6.43 +llama-index==0.12.51 +llama-index-agent-openai==0.4.12 +llama-index-cli==0.4.4 +llama-index-core==0.12.52 +llama-index-embeddings-azure-openai==0.3.9 llama-index-embeddings-openai==0.3.1 -llama-index-indices-managed-llama-cloud==0.6.11 -llama-index-llms-azure-openai==0.3.2 -llama-index-llms-bedrock-converse==0.7.1 -llama-index-llms-fireworks==0.3.2 +llama-index-indices-managed-llama-cloud==0.7.10 +llama-index-instrumentation==0.3.0 +llama-index-llms-azure-openai==0.3.4 +llama-index-llms-bedrock-converse==0.7.6 llama-index-llms-langchain==0.6.1 llama-index-llms-litellm==0.4.2 -llama-index-llms-openai==0.3.38 +llama-index-llms-openai==0.4.7 llama-index-llms-vertex==0.5.0 -llama-index-multi-modal-llms-openai==0.4.3 -llama-index-program-openai==0.3.1 -llama-index-question-gen-openai==0.3.0 -llama-index-readers-file==0.4.7 +llama-index-multi-modal-llms-openai==0.5.3 +llama-index-program-openai==0.3.2 +llama-index-question-gen-openai==0.3.1 +llama-index-readers-file==0.4.11 llama-index-readers-llama-parse==0.4.0 -llama-parse==0.6.12 +llama-index-workflows==1.1.0 +llama-parse==0.6.43 loguru==0.7.3 mako==1.3.10 markdown-it-py==3.0.0 @@ -209,100 +211,101 @@ markupsafe==3.0.2 marshmallow==3.26.1 matplotlib-inline==0.1.7 mdurl==0.1.2 -mem0ai==0.1.98 +mem0ai==0.1.114 memory-profiler==0.61.0 mistune==3.1.3 mmh3==5.1.0 monotonic==1.6 more-itertools==10.7.0 mpmath==1.3.0 -msal==1.32.3 +msal==1.33.0 
msal-extensions==1.3.1 -multidict==6.4.3 +multidict==6.5.1 multiprocess==0.70.16 mypy-extensions==1.1.0 nbclient==0.10.2 nbconvert==7.16.6 nbformat==5.10.4 -nemoguardrails==0.14.0 +nemoguardrails==0.14.1 nest-asyncio==1.6.0 networkx==3.4.2 -nh3==0.2.21 +nh3==0.3.0 nltk==3.9.1 nodeenv==1.9.1 -numpy==2.2.5 -oauthlib==3.2.2 +numpy==2.2.6 +oauthlib==3.3.1 ollama==0.5.1 -onnxruntime==1.21.1 -openai==1.75.0 +onnxruntime==1.22.0 +openai==1.97.1 openpyxl==3.1.5 -opentelemetry-api==1.33.0 -opentelemetry-exporter-otlp-proto-common==1.33.0 -opentelemetry-exporter-otlp-proto-grpc==1.33.0 -opentelemetry-exporter-otlp-proto-http==1.33.0 -opentelemetry-instrumentation==0.54b0 -opentelemetry-instrumentation-aiohttp-client==0.54b0 -opentelemetry-instrumentation-alephalpha==0.40.3 -opentelemetry-instrumentation-anthropic==0.40.3 -opentelemetry-instrumentation-asgi==0.54b0 -opentelemetry-instrumentation-bedrock==0.40.3 -opentelemetry-instrumentation-chromadb==0.40.3 -opentelemetry-instrumentation-cohere==0.40.3 -opentelemetry-instrumentation-crewai==0.40.3 -opentelemetry-instrumentation-fastapi==0.54b0 -opentelemetry-instrumentation-google-generativeai==0.40.3 -opentelemetry-instrumentation-groq==0.40.3 -opentelemetry-instrumentation-haystack==0.40.3 -opentelemetry-instrumentation-httpx==0.54b0 -opentelemetry-instrumentation-lancedb==0.40.3 -opentelemetry-instrumentation-langchain==0.40.3 -opentelemetry-instrumentation-llamaindex==0.40.3 -opentelemetry-instrumentation-logging==0.54b0 -opentelemetry-instrumentation-marqo==0.40.3 -opentelemetry-instrumentation-mcp==0.40.3 -opentelemetry-instrumentation-milvus==0.40.3 -opentelemetry-instrumentation-mistralai==0.40.3 -opentelemetry-instrumentation-ollama==0.40.3 -opentelemetry-instrumentation-openai==0.40.3 -opentelemetry-instrumentation-pinecone==0.40.3 -opentelemetry-instrumentation-qdrant==0.40.3 -opentelemetry-instrumentation-replicate==0.40.3 -opentelemetry-instrumentation-requests==0.54b0 -opentelemetry-instrumentation-sagemaker==0.40.3 -opentelemetry-instrumentation-sqlalchemy==0.54b0 -opentelemetry-instrumentation-threading==0.54b0 -opentelemetry-instrumentation-together==0.40.3 -opentelemetry-instrumentation-transformers==0.40.3 -opentelemetry-instrumentation-urllib3==0.54b0 -opentelemetry-instrumentation-vertexai==0.40.3 -opentelemetry-instrumentation-watsonx==0.40.3 -opentelemetry-instrumentation-weaviate==0.40.3 -opentelemetry-proto==1.33.0 -opentelemetry-sdk==1.33.0 -opentelemetry-semantic-conventions==0.54b0 -opentelemetry-semantic-conventions-ai==0.4.5 -opentelemetry-util-http==0.54b0 -orjson==3.10.18 -ormsgpack==1.9.1 +opentelemetry-api==1.33.1 +opentelemetry-exporter-otlp-proto-common==1.33.1 +opentelemetry-exporter-otlp-proto-grpc==1.33.1 +opentelemetry-exporter-otlp-proto-http==1.33.1 +opentelemetry-instrumentation==0.54b1 +opentelemetry-instrumentation-aiohttp-client==0.54b1 +opentelemetry-instrumentation-alephalpha==0.40.14 +opentelemetry-instrumentation-anthropic==0.40.14 +opentelemetry-instrumentation-asgi==0.54b1 +opentelemetry-instrumentation-bedrock==0.40.14 +opentelemetry-instrumentation-chromadb==0.40.14 +opentelemetry-instrumentation-cohere==0.40.14 +opentelemetry-instrumentation-crewai==0.40.14 +opentelemetry-instrumentation-fastapi==0.54b1 +opentelemetry-instrumentation-google-generativeai==0.40.14 +opentelemetry-instrumentation-groq==0.40.14 +opentelemetry-instrumentation-haystack==0.40.14 +opentelemetry-instrumentation-httpx==0.54b1 +opentelemetry-instrumentation-lancedb==0.40.14 +opentelemetry-instrumentation-langchain==0.40.14 
+opentelemetry-instrumentation-llamaindex==0.40.14 +opentelemetry-instrumentation-logging==0.54b1 +opentelemetry-instrumentation-marqo==0.40.14 +opentelemetry-instrumentation-mcp==0.40.14 +opentelemetry-instrumentation-milvus==0.40.14 +opentelemetry-instrumentation-mistralai==0.40.14 +opentelemetry-instrumentation-ollama==0.40.14 +opentelemetry-instrumentation-openai==0.40.14 +opentelemetry-instrumentation-pinecone==0.40.14 +opentelemetry-instrumentation-qdrant==0.40.14 +opentelemetry-instrumentation-redis==0.54b1 +opentelemetry-instrumentation-replicate==0.40.14 +opentelemetry-instrumentation-requests==0.54b1 +opentelemetry-instrumentation-sagemaker==0.40.14 +opentelemetry-instrumentation-sqlalchemy==0.54b1 +opentelemetry-instrumentation-threading==0.54b1 +opentelemetry-instrumentation-together==0.40.14 +opentelemetry-instrumentation-transformers==0.40.14 +opentelemetry-instrumentation-urllib3==0.54b1 +opentelemetry-instrumentation-vertexai==0.40.14 +opentelemetry-instrumentation-watsonx==0.40.14 +opentelemetry-instrumentation-weaviate==0.40.14 +opentelemetry-proto==1.33.1 +opentelemetry-sdk==1.33.1 +opentelemetry-semantic-conventions==0.54b1 +opentelemetry-semantic-conventions-ai==0.4.9 +opentelemetry-util-http==0.54b1 +orjson==3.11.0 +ormsgpack==1.10.0 overrides==7.7.0 -packaging==24.2 +packaging==25.0 pandas==2.2.3 pandocfilters==1.5.1 parso==0.8.4 pathspec==0.12.1 -pdfminer-six==20250327 -pdfplumber==0.11.6 +pdfminer-six==20250506 +pdfplumber==0.11.7 pexpect==4.9.0 -pillow==11.2.1 +pillow==11.3.0 pkginfo==1.10.0 platformdirs==4.3.8 pluggy==1.6.0 -portalocker==2.10.1 +portalocker==3.2.0 posthog==3.25.0 -progress==1.6 -prometheus-client==0.21.1 +progress==1.6.1 +prometheus-client==0.22.1 prompt-toolkit==3.0.51 -propcache==0.3.1 +propcache==0.3.2 proto-plus==1.26.1 protobuf==5.29.5 psutil==7.0.0 @@ -310,35 +313,35 @@ ptyprocess==0.7.0 pure-eval==0.2.3 py-rust-stemmers==0.1.5 py4j==0.10.9.9 -pyarrow==20.0.0 +pyarrow==21.0.0 pyasn1==0.6.1 pyasn1-modules==0.4.2 pycparser==2.22 -pydantic==2.11.4 +pydantic==2.11.7 pydantic-core==2.33.2 -pydantic-settings==2.9.1 -pygments==2.19.1 +pydantic-settings==2.10.1 +pygments==2.19.2 pyjwt[crypto]==2.10.1 -pypdf==5.4.0 -pypdfium2==4.30.1 +pypdf==5.8.0 +pypdfium2==4.30.0 pypika==0.48.9 pyproject-hooks==1.2.0 -pyright==1.1.400 +pyright==1.1.403 pysbd==0.3.4 -pytest==8.4.0 -pytest-asyncio==1.0.0 +pytest==8.4.1 +pytest-asyncio==1.1.0 pytest-repeat==0.9.4 pytest-rerunfailures==12.0 -pytest-xdist==3.7.0 +pytest-xdist==3.8.0 python-dateutil==2.9.0.post0 -python-dotenv==1.1.0 +python-dotenv==1.1.1 python-json-logger==3.3.0 pytube==15.0.0 -pytz==2024.2 +pytz==2025.2 pyvis==0.3.2 pyyaml==6.0.2 -pyzmq==26.4.0 -qdrant-client==1.14.2 +pyzmq==27.0.0 +qdrant-client==1.15.0 ragas @ git+https://github.com/explodinggradients/ragas@5d59549ad5ef511f621502c563bc55ac5aeb9188#subdirectory=ragas readme-renderer==44.0 referencing==0.36.2 @@ -349,32 +352,33 @@ requests-toolbelt==1.0.0 rfc3339-validator==0.1.4 rfc3986==2.0.0 rfc3986-validator==0.1.1 +rfc3987-syntax==1.1.0 rich==13.9.4 rouge-score==0.1.2 -rpds-py==0.24.0 +rpds-py==0.26.0 rsa==4.9.1 ruamel-yaml==0.17.4 -s3transfer==0.11.3 +s3transfer==0.13.1 schema==0.7.7 scipy==1.15.3 secretstorage==3.3.3 send2trash==1.8.3 -sentry-sdk==2.29.1 +sentry-sdk==2.33.2 shapely==2.1.1 shellingham==1.5.4 simpleeval==1.0.3 six==1.17.0 sniffio==1.3.1 soupsieve==2.7 -sqlalchemy[asyncio]==2.0.40 +sqlalchemy[asyncio]==2.0.41 stack-data==0.6.3 -starlette==0.46.2 +starlette==0.47.2 strenum==0.4.15 strictyaml==1.4.2 striprtf==0.0.26 sympy==1.14.0 
tabulate==0.9.0 -tenacity==9.0.0 +tenacity==8.5.0 termcolor==3.1.0 terminado==0.18.1 texttable==1.7.0 @@ -383,27 +387,27 @@ tinycss2==1.4.0 tokenizers==0.20.3 tomli==2.2.1 tomli-w==1.2.0 -tornado==6.5 +tornado==6.5.1 tqdm==4.67.1 -traceloop-sdk==0.40.3 +traceloop-sdk==0.40.14 trafaret==2.1.1 traitlets==5.14.3 twine==5.1.1 -typer==0.15.3 -types-python-dateutil==2.9.0.20241206 -types-requests==2.32.0.20250328 -typing-extensions==4.13.2 +typer==0.16.0 +types-python-dateutil==2.9.0.20250708 +types-requests==2.32.4.20250611 +typing-extensions==4.14.1 typing-inspect==0.9.0 -typing-inspection==0.4.0 +typing-inspection==0.4.1 tzdata==2025.2 uri-template==1.3.0 -urllib3==2.4.0 -uv==0.7.3 -uvicorn[standard]==0.34.2 +urllib3==2.5.0 +uv==0.8.2 +uvicorn[standard]==0.35.0 uvloop==0.21.0 -uwsgi==2.0.29 +uwsgi==2.0.30 watchdog==6.0.0 -watchfiles==1.0.5 +watchfiles==1.1.0 wcwidth==0.2.13 webcolors==24.11.1 webencodings==0.5.1 @@ -413,8 +417,8 @@ werkzeug==3.1.3 wheel==0.45.1 wrapt==1.17.2 xxhash==3.5.0 -yarl==1.20.0 -zipp==3.21.0 +yarl==1.20.1 +zipp==3.23.0 zstandard==0.23.0 # The following packages are considered to be unsafe in a requirements file: diff --git a/public_dropin_environments/python311_genai_agents/run_agent.py b/public_dropin_environments/python311_genai_agents/run_agent.py index 2a71fc737..a645e22b6 100644 --- a/public_dropin_environments/python311_genai_agents/run_agent.py +++ b/public_dropin_environments/python311_genai_agents/run_agent.py @@ -19,11 +19,12 @@ import socket import sys from pathlib import Path -from typing import Any, TextIO +from typing import Any, TextIO, cast from urllib.parse import urlparse, urlunparse import requests from datarobot_drum.drum.enum import TargetType +from datarobot_drum.drum.root_predictors.drum_inline_utils import drum_inline_predictor from datarobot_drum.drum.root_predictors.drum_server_utils import DrumServerRun from openai import OpenAI from openai.types.chat import ChatCompletion @@ -88,6 +89,11 @@ def argparse_args() -> argparse.Namespace: default=None, help="Custom attributes for tracing. 
Should be a JSON dictionary.", ) + parser.add_argument( + "--use_serverless", + action="store_true", + help="Use DRUM serverless predictor.", + ) args = parser.parse_args() return args @@ -128,15 +134,26 @@ def setup_otel_env_variables(entity_id: str) -> None: root.info("OTEL_EXPORTER_OTLP_ENDPOINT or OTEL_EXPORTER_OTLP_HEADERS already set, skipping") return - datarobot_endpoint = os.environ.get("DATAROBOT_ENDPOINT") + datarobot_endpoint = os.environ.get("DATAROBOT_ENDPOINT", "") datarobot_api_token = os.environ.get("DATAROBOT_API_TOKEN") - if not datarobot_endpoint or not datarobot_api_token: - root.warning("DATAROBOT_ENDPOINT or DATAROBOT_API_TOKEN not set, tracing is disabled") + otlp_endpoint = os.environ.get("DATAROBOT_OTEL_COLLECTOR_BASE_URL", "") + + if not (datarobot_endpoint or otlp_endpoint): + root.warning( + "DATAROBOT_ENDPOINT or DATAROBOT_OTEL_COLLECTOR_BASE_URL not set, tracing is disabled" + ) + return + + if not datarobot_api_token: + root.warning("DATAROBOT_API_TOKEN not set, tracing is disabled") return - parsed_url = urlparse(datarobot_endpoint) - stripped_url = (parsed_url.scheme, parsed_url.netloc, "otel", "", "", "") - otlp_endpoint = urlunparse(stripped_url) + if not otlp_endpoint: + assert datarobot_endpoint is not None # mypy + parsed_url = urlparse(datarobot_endpoint) + stripped_url = (parsed_url.scheme, parsed_url.netloc, "otel", "", "", "") + otlp_endpoint = urlunparse(stripped_url) + otlp_headers = f"X-DataRobot-Api-Key={datarobot_api_token},X-DataRobot-Entity-Id={entity_id}" os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = otlp_endpoint os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = otlp_headers @@ -235,6 +252,24 @@ def execute_drum( return completion +def execute_drum_inline( + chat_completion: CompletionCreateParamsBase, + custom_model_dir: Path, +) -> ChatCompletion: + root.info("Executing agent as [chat] endpoint. DRUM Inline Executor.") + + root.info("Starting DRUM runner.") + with drum_inline_predictor( + target_type=TargetType.AGENTIC_WORKFLOW.value, + custom_model_dir=custom_model_dir, + target_name="response", + ) as predictor: + root.info("Executing Agent") + completion = predictor.chat(chat_completion) + + return cast(ChatCompletion, completion) + + def construct_prompt(chat_completion: str) -> CompletionCreateParamsBase: chat_completion_dict = json.loads(chat_completion) model = chat_completion_dict.get("model") @@ -269,11 +304,17 @@ def run_agent_procedure(args: Any) -> None: root.info(f"Trace id: {trace_id}") root.info(f"Executing request in directory {args.custom_model_dir}") - result = execute_drum( - chat_completion=chat_completion, - default_headers=default_headers, - custom_model_dir=args.custom_model_dir, - ) + if args.use_serverless: + result = execute_drum_inline( + chat_completion=chat_completion, + custom_model_dir=args.custom_model_dir, + ) + else: + result = execute_drum( + chat_completion=chat_completion, + default_headers=default_headers, + custom_model_dir=args.custom_model_dir, + ) store_result( result, trace_id, diff --git a/public_dropin_environments/python3_keras/Dockerfile b/public_dropin_environments/python3_keras/Dockerfile index a51165ec4..902851443 100644 --- a/public_dropin_environments/python3_keras/Dockerfile +++ b/public_dropin_environments/python3_keras/Dockerfile @@ -1,11 +1,11 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python3_keras/env_info.json b/public_dropin_environments/python3_keras/env_info.json index 3a0c1b007..e556fce7a 100644 --- a/public_dropin_environments/python3_keras/env_info.json +++ b/public_dropin_environments/python3_keras/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only keras custom models. This environment contains keras backed by tensorflow and only requires your model artifact as a .h5 file and optionally a custom.py file.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b5b62081a817e65a8c44", + "environmentVersionId": "6890c005004f7b79c7006c23", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python3_keras", "imageRepository": "env-python-keras", "tags": [ - "v11.1.0-6848b5b62081a817e65a8c44", - "6848b5b62081a817e65a8c44", - "v11.1.0-latest" + "v11.2.0-6890c005004f7b79c7006c23", + "6890c005004f7b79c7006c23", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python3_keras/requirements.txt b/public_dropin_environments/python3_keras/requirements.txt index e48f6fe7a..72350ad75 100644 --- a/public_dropin_environments/python3_keras/requirements.txt +++ b/public_dropin_environments/python3_keras/requirements.txt @@ -21,8 +21,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -76,10 +76,10 @@ optree==0.15.0 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -92,7 +92,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rich==14.0.0 rsa==4.9.1 @@ -115,7 +115,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wheel==0.45.1 wrapt==1.17.2 diff --git a/public_dropin_environments/python3_onnx/Dockerfile b/public_dropin_environments/python3_onnx/Dockerfile index f5bb7a53b..47ed837c4 100644 --- a/public_dropin_environments/python3_onnx/Dockerfile +++ b/public_dropin_environments/python3_onnx/Dockerfile @@ -1,11 +1,11 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python3_onnx/env_info.json b/public_dropin_environments/python3_onnx/env_info.json index c7c1ad86d..ef2dd1a28 100644 --- a/public_dropin_environments/python3_onnx/env_info.json +++ b/public_dropin_environments/python3_onnx/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only ONNX custom models. This environment contains ONNX runtime and only requires your model artifact as an .onnx file and optionally a custom.py file.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b5cf2081a8180cf1da56", + "environmentVersionId": "6890c00500644578ad000387", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python3_onnx", "imageRepository": "env-python-onnx", "tags": [ - "v11.1.0-6848b5cf2081a8180cf1da56", - "6848b5cf2081a8180cf1da56", - "v11.1.0-latest" + "v11.2.0-6890c00500644578ad000387", + "6890c00500644578ad000387", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python3_onnx/requirements.txt b/public_dropin_environments/python3_onnx/requirements.txt index be0421e5f..516b2a846 100644 --- a/public_dropin_environments/python3_onnx/requirements.txt +++ b/public_dropin_environments/python3_onnx/requirements.txt @@ -20,8 +20,8 @@ click==8.2.1 coloredlogs==15.0.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -65,10 +65,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -80,7 +80,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -98,7 +98,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_environments/python3_pmml/Dockerfile b/public_dropin_environments/python3_pmml/Dockerfile index 74022b9b9..19d2d2aaf 100644 --- a/public_dropin_environments/python3_pmml/Dockerfile +++ b/public_dropin_environments/python3_pmml/Dockerfile @@ -1,6 +1,6 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build USER root @@ -9,7 +9,7 @@ RUN apk add --no-cache openjdk-11 # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python3_pmml/env_info.json b/public_dropin_environments/python3_pmml/env_info.json index 74d2d2b52..5c07e9574 100644 --- a/public_dropin_environments/python3_pmml/env_info.json +++ b/public_dropin_environments/python3_pmml/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only PMML custom models. This environment contains PyPMML and only requires your model artifact as a .pmml file and optionally a custom.py file.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b5d92081a8182b5d8d58", + "environmentVersionId": "6890c005003ede60bb004932", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python3_pmml", "imageRepository": "env-python-pmml", "tags": [ - "v11.1.0-6848b5d92081a8182b5d8d58", - "6848b5d92081a8182b5d8d58", - "v11.1.0-latest" + "v11.2.0-6890c005003ede60bb004932", + "6890c005003ede60bb004932", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python3_pmml/requirements.txt b/public_dropin_environments/python3_pmml/requirements.txt index 5efdd6fe4..8fa0eed31 100644 --- a/public_dropin_environments/python3_pmml/requirements.txt +++ b/public_dropin_environments/python3_pmml/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -60,10 +60,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -76,7 +76,7 @@ pypmml==1.5.6 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -91,7 +91,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_environments/python3_pytorch/Dockerfile b/public_dropin_environments/python3_pytorch/Dockerfile index 3f5730fc6..5e0ab8862 100644 --- a/public_dropin_environments/python3_pytorch/Dockerfile +++ b/public_dropin_environments/python3_pytorch/Dockerfile @@ -1,11 +1,11 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python3_pytorch/env_info.json b/public_dropin_environments/python3_pytorch/env_info.json index 306598b06..f55f1336f 100644 --- a/public_dropin_environments/python3_pytorch/env_info.json +++ b/public_dropin_environments/python3_pytorch/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only PyTorch custom models. This environment contains PyTorch and requires only your model artifact as a .pth file, any other code needed to deserialize your model, and optionally a custom.py file.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b5e32081a8184891014d", + "environmentVersionId": "6890c0050035434368007c08", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python3_pytorch", "imageRepository": "env-python-pytorch", "tags": [ - "v11.1.0-6848b5e32081a8184891014d", - "6848b5e32081a8184891014d", - "v11.1.0-latest" + "v11.2.0-6890c0050035434368007c08", + "6890c0050035434368007c08", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python3_pytorch/requirements.txt b/public_dropin_environments/python3_pytorch/requirements.txt index eed762a15..5e0b7eb17 100644 --- a/public_dropin_environments/python3_pytorch/requirements.txt +++ b/public_dropin_environments/python3_pytorch/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -78,10 +78,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -93,7 +93,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -113,7 +113,7 @@ triton==3.3.0 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_environments/python3_sklearn/Dockerfile b/public_dropin_environments/python3_sklearn/Dockerfile index 3f5730fc6..5e0ab8862 100644 --- a/public_dropin_environments/python3_sklearn/Dockerfile +++ b/public_dropin_environments/python3_sklearn/Dockerfile @@ -1,11 +1,11 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python3_sklearn/env_info.json b/public_dropin_environments/python3_sklearn/env_info.json index bd32afe37..55b60e406 100644 --- a/public_dropin_environments/python3_sklearn/env_info.json +++ b/public_dropin_environments/python3_sklearn/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only scikit-learn custom models. This environment contains scikit-learn and only requires your model artifact as a .pkl file and optionally a custom.py file.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b6142081a818745ff7b3", + "environmentVersionId": "6890c00500013936b6007db0", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python3_sklearn", "imageRepository": "env-python-sklearn", "tags": [ - "v11.1.0-6848b6142081a818745ff7b3", - "6848b6142081a818745ff7b3", - "v11.1.0-latest" + "v11.2.0-6890c00500013936b6007db0", + "6890c00500013936b6007db0", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python3_sklearn/requirements.txt b/public_dropin_environments/python3_sklearn/requirements.txt index 203761ee1..5f75f0e78 100644 --- a/public_dropin_environments/python3_sklearn/requirements.txt +++ b/public_dropin_environments/python3_sklearn/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -60,10 +60,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -75,7 +75,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -92,7 +92,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_environments/python3_xgboost/Dockerfile b/public_dropin_environments/python3_xgboost/Dockerfile index f5bb7a53b..47ed837c4 100644 --- a/public_dropin_environments/python3_xgboost/Dockerfile +++ b/public_dropin_environments/python3_xgboost/Dockerfile @@ -1,11 +1,11 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/python3_xgboost/env_info.json b/public_dropin_environments/python3_xgboost/env_info.json index bb34b27a6..2137c2079 100644 --- a/public_dropin_environments/python3_xgboost/env_info.json +++ b/public_dropin_environments/python3_xgboost/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only xgboost custom models. This environment contains xgboost and only requires your model artifact as a .pkl file and optionally a custom.py file.", "programmingLanguage": "python", "label": "", - "environmentVersionId": "6848b61e2081a818907a7f56", + "environmentVersionId": "6890c0050067b573320005a4", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/python3_xgboost", "imageRepository": "env-python-xgboost", "tags": [ - "v11.1.0-6848b61e2081a818907a7f56", - "6848b61e2081a818907a7f56", - "v11.1.0-latest" + "v11.2.0-6890c0050067b573320005a4", + "6890c0050067b573320005a4", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/python3_xgboost/requirements.txt b/public_dropin_environments/python3_xgboost/requirements.txt index 9d5df4b21..dd4354eea 100644 --- a/public_dropin_environments/python3_xgboost/requirements.txt +++ b/public_dropin_environments/python3_xgboost/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -61,10 +61,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -76,7 +76,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rsa==4.9.1 ruamel-yaml==0.17.4 @@ -93,7 +93,7 @@ trafaret==2.1.1 typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 xgboost==3.0.2 diff --git a/public_dropin_environments/r_lang/Dockerfile b/public_dropin_environments/r_lang/Dockerfile index 72208f13e..feb34f30f 100644 --- a/public_dropin_environments/r_lang/Dockerfile +++ b/public_dropin_environments/r_lang/Dockerfile @@ -1,6 +1,6 @@ # This is a private chain-guard development image that is stored in DataRobot's private registry. # Replace it with your own development chain-gaurd image if you build your own. 
-ARG BASE_ROOT_IMAGE=datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11-dev +ARG BASE_ROOT_IMAGE=datarobot/mirror_chainguard_datarobot.com_python-fips:3.11-dev FROM ${BASE_ROOT_IMAGE} AS build USER root @@ -40,7 +40,7 @@ RUN Rscript -e "install.packages( \ # This is a private production chain-guard image that is stored in DataRobot's private registry. # Replace it with your own production chain-gaurd image if you build your own. -FROM datarobotdev/mirror_chainguard_datarobot.com_python-fips:3.11 +FROM datarobot/mirror_chainguard_datarobot.com_python-fips:3.11 USER root diff --git a/public_dropin_environments/r_lang/env_info.json b/public_dropin_environments/r_lang/env_info.json index e4d0645f3..6206c8c40 100644 --- a/public_dropin_environments/r_lang/env_info.json +++ b/public_dropin_environments/r_lang/env_info.json @@ -4,17 +4,18 @@ "description": "This template environment can be used to create artifact-only R custom models that use the caret library. Your custom model archive need only contain your model artifacts if you use the environment correctly.", "programmingLanguage": "r", "label": "", - "environmentVersionId": "6851e335101ce30faf6969f1", + "environmentVersionId": "6890c005005c537501002643", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_environments/r_lang", "imageRepository": "env-r-lang", "tags": [ - "v11.1.0-6851e335101ce30faf6969f1", - "6851e335101ce30faf6969f1", - "v11.1.0-latest" + "v11.2.0-6890c005005c537501002643", + "6890c005005c537501002643", + "v11.2.0-latest" ] } diff --git a/public_dropin_environments/r_lang/requirements.txt b/public_dropin_environments/r_lang/requirements.txt index ed0896314..bf0df5e71 100644 --- a/public_dropin_environments/r_lang/requirements.txt +++ b/public_dropin_environments/r_lang/requirements.txt @@ -19,8 +19,8 @@ charset-normalizer==3.4.2 click==8.2.1 cryptography==45.0.3 datarobot==3.7.1 -datarobot-drum[R,r]==1.16.17 -datarobot-mlops==11.1.0a3 +datarobot-drum[R,r]==1.16.19 +datarobot-mlops==11.1.0 datarobot-storage==2.2.0 deprecated==1.2.18 docker==7.1.0 @@ -59,10 +59,10 @@ opentelemetry-util-http==0.54b1 orjson==3.10.18 packaging==25.0 pandas==2.2.3 -pillow==11.2.1 +pillow==11.3.0 progress==1.6 proto-plus==1.26.1 -protobuf==5.29.4 +protobuf==5.29.5 psutil==7.0.0 py4j==0.10.9.9 pyasn1==0.6.1 @@ -74,7 +74,7 @@ pyjwt[crypto]==2.10.1 python-dateutil==2.9.0.post0 pytz==2025.2 pyyaml==6.0.2 -requests==2.32.3 +requests==2.32.4 requests-toolbelt==1.0.0 rpy2==3.5.8 rsa==4.9.1 @@ -91,7 +91,7 @@ typing-extensions==4.13.2 typing-inspection==0.4.1 tzdata==2025.2 tzlocal==5.3.1 -urllib3==2.4.0 +urllib3==2.5.0 werkzeug==3.1.3 wrapt==1.17.2 zipp==3.22.0 diff --git a/public_dropin_gpu_environments/vllm/Dockerfile b/public_dropin_gpu_environments/vllm/Dockerfile index 0f54d7a22..83406ce23 100644 --- a/public_dropin_gpu_environments/vllm/Dockerfile +++ b/public_dropin_gpu_environments/vllm/Dockerfile @@ -37,5 +37,7 @@ WORKDIR ${CODE_DIR} COPY --chown=1000:0 ./*.sh ./*.py ${CODE_DIR}/ ENV WITH_ERROR_SERVER=1 + #This makes print statements show up in the logs API +ENV PYTHONUNBUFFERED=1 ENTRYPOINT ["/opt/code/start_server.sh"] diff --git a/public_dropin_gpu_environments/vllm/env_info.json b/public_dropin_gpu_environments/vllm/env_info.json index 2d5435ccb..6de090bd5 100644 --- a/public_dropin_gpu_environments/vllm/env_info.json +++ b/public_dropin_gpu_environments/vllm/env_info.json @@ -4,17 
+4,18 @@ "description": "A high-throughput and memory-efficient inference and serving engine for LLMs.", "programmingLanguage": "python", "label": "v0.8.3+dr.1", - "environmentVersionId": "6848b64a2081a81aa6129277", + "environmentVersionId": "6865f515d7dccb0f91ea025e", "environmentVersionDescription": "Update to vllm v0.8.3\nFROM vllm/vllm-openai:v0.8.3", "isPublic": true, + "isDownloadable": true, "useCases": [ "customModel" ], "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_gpu_environments/vllm", "imageRepository": "env-gpu-vllm", "tags": [ - "v11.1.0-6848b64a2081a81aa6129277", - "6848b64a2081a81aa6129277", - "v11.1.0-latest" + "v11.2.0-6865f515d7dccb0f91ea025e", + "6865f515d7dccb0f91ea025e", + "v11.2.0-latest" ] } diff --git a/public_dropin_gpu_environments/vllm/requirements.txt b/public_dropin_gpu_environments/vllm/requirements.txt index 67c85e9b6..47ee7dd48 100644 --- a/public_dropin_gpu_environments/vllm/requirements.txt +++ b/public_dropin_gpu_environments/vllm/requirements.txt @@ -26,9 +26,9 @@ charset-normalizer==3.4.1 click==8.1.8 cryptography==44.0.1 datarobot==3.6.2 -datarobot-drum==1.16.17 -datarobot-mlops==11.1.0a3 -datarobot-mlops-connected-client==11.1.0a3 +datarobot-drum==1.16.20 +datarobot-mlops==11.1.0 +datarobot-mlops-connected-client==11.1.0 datarobot-storage==0.0.0 deprecated==1.2.18 distro==1.9.0 diff --git a/public_dropin_notebook_environments/python311_notebook_base/env_info.json b/public_dropin_notebook_environments/python311_notebook_base/env_info.json index a359143fb..6cba90893 100644 --- a/public_dropin_notebook_environments/python311_notebook_base/env_info.json +++ b/public_dropin_notebook_environments/python311_notebook_base/env_info.json @@ -7,6 +7,7 @@ "environmentVersionId": "6848b6652081a81af97479b4", "environmentVersionDescription": "", "isPublic": true, + "isDownloadable": true, "useCases": [ "notebook", "customModel" @@ -14,8 +15,8 @@ "contextUrl": "https://github.com/datarobot/datarobot-user-models/tree/master/public_dropin_notebook_environments/python311_notebook_base", "imageRepository": "env-notebook-python311-notebook-base", "tags": [ - "v11.1.0-6848b6652081a81af97479b4", + "v11.2.0-6848b6652081a81af97479b4", "6848b6652081a81af97479b4", - "v11.1.0-latest" + "v11.2.0-latest" ] } diff --git a/tests/conftest.py b/tests/conftest.py index c945c0c34..21c5ddcf5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,7 +18,6 @@ PYTHON_XGBOOST, PYTHON_KERAS, PYTHON_PYTORCH, - PYTHON311_GENAI, PYTHON_ONNX, PYTHON_PYPMML, R_LANG, @@ -199,7 +198,6 @@ CUSTOM_TASK_INTERFACE_PYTORCH_BINARY, CUSTOM_TASK_INTERFACE_PYTORCH_MULTICLASS, ], - PYTHON311_GENAI: [PYTHON_TEXT_GENERATION, PYTHON_AGENTIC_WORKFLOW], PYTHON_ONNX: [ONNX], PYTHON_PYPMML: [PYPMML], R_LANG: [ @@ -258,7 +256,6 @@ def pytest_addoption(parser): PYTHON_XGBOOST, PYTHON_KERAS, PYTHON_PYTORCH, - PYTHON311_GENAI, PYTHON_ONNX, PYTHON_PYPMML, R_LANG, diff --git a/tests/constants.py b/tests/constants.py index 8111d4b54..9caf02d13 100644 --- a/tests/constants.py +++ b/tests/constants.py @@ -31,7 +31,6 @@ PYTHON_XGBOOST = "python3_xgboost" PYTHON_KERAS = "python3_keras" PYTHON_PYTORCH = "python3_pytorch" -PYTHON311_GENAI = "python311_genai" PYTHON_ONNX = "python3_onnx" PYTHON_PYPMML = "python3_pmml" R_LANG = "r_lang" diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index a5c5ff8ca..a96e2756e 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -139,13 +139,6 @@ def pytorch_drop_in_env(): ) -@pytest.fixture(scope="session") -def 
python311_genai_drop_in_env(): - return create_drop_in_env( - PUBLIC_DROPIN_ENVS_PATH, "python311_genai", max_wait=3 * DEFAULT_MAX_WAIT - ) - - @pytest.fixture(scope="session") def onnx_drop_in_env(): return create_drop_in_env(PUBLIC_DROPIN_ENVS_PATH, "python3_onnx") diff --git a/tests/e2e/test_drop_in_environments.py b/tests/e2e/test_drop_in_environments.py index 01cdf3c1e..3846b2f0c 100644 --- a/tests/e2e/test_drop_in_environments.py +++ b/tests/e2e/test_drop_in_environments.py @@ -160,17 +160,6 @@ def python311_custom_model(self, python311_drop_in_env): return custom_model.id, model_version.id - @pytest.fixture(scope="session") - def python311_genai_custom_model(self, python311_genai_drop_in_env): - env_id, _ = python311_genai_drop_in_env - return self.make_custom_model( - "torch_reg.pth", - env_id, - custom_predict_path=CUSTOM_PREDICT_PY_PATH, - other_file_names=["PyTorch.py"], - maximum_memory=8 * 1024 * 1024 * 1024, - ) - @pytest.fixture(scope="session") def onnx_regression_custom_model(self, onnx_drop_in_env): env_id, _ = onnx_drop_in_env @@ -206,7 +195,6 @@ def r_regression_custom_model(self, r_drop_in_env): "model, test_data_id, max_wait", [ ("python311_custom_model", "regression_testing_data", DEFAULT_MAX_WAIT), - ("python311_genai_custom_model", "regression_testing_data", 3 * DEFAULT_MAX_WAIT), ("r_regression_custom_model", "regression_testing_data", 5 * DEFAULT_MAX_WAIT), ("pytorch_regression_custom_model", "regression_testing_data", 2 * DEFAULT_MAX_WAIT), ("keras_regression_custom_model", "regression_testing_data", DEFAULT_MAX_WAIT), diff --git a/tests/functional/test_per_framework.json b/tests/functional/test_per_framework.json index 8c562ca09..beff7668a 100644 --- a/tests/functional/test_per_framework.json +++ b/tests/functional/test_per_framework.json @@ -8,7 +8,6 @@ {"env_folder": "public_dropin_environments", "framework": "python3_sklearn", "with_local": true}, {"env_folder": "public_dropin_environments", "framework": "python3_xgboost", "with_local": true}, {"env_folder": "public_dropin_environments", "framework": "python311", "with_local": true}, - {"env_folder": "public_dropin_environments", "framework": "python311_genai", "with_local": true}, {"env_folder": "public_dropin_environments", "framework": "r_lang", "with_local": true}, {"env_folder": "public_dropin_gpu_environments", "framework": "vllm"} ]
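Note on the run_agent.py changes above: the sketch below shows how the new --use_serverless path is expected to be exercised in-process, assuming a hypothetical custom model directory and chat payload; the drum_inline_predictor call mirrors the code added in this diff, while the directory path and payload are illustrative only.

    # Sketch only: the in-process ("serverless") execution path added to
    # run_agent.py, used instead of starting a DRUM HTTP server.
    from pathlib import Path

    from datarobot_drum.drum.enum import TargetType
    from datarobot_drum.drum.root_predictors.drum_inline_utils import drum_inline_predictor

    custom_model_dir = Path("/opt/code/custom_model")  # hypothetical path
    chat_completion = {  # hypothetical payload in OpenAI chat-completions form
        "model": "agent",
        "messages": [{"role": "user", "content": "Summarize the latest run."}],
    }

    # Start an inline predictor for an agentic workflow target and run a single
    # chat completion against it, as execute_drum_inline does in the diff above.
    with drum_inline_predictor(
        target_type=TargetType.AGENTIC_WORKFLOW.value,
        custom_model_dir=custom_model_dir,
        target_name="response",
    ) as predictor:
        completion = predictor.chat(chat_completion)
    print(completion)

From the command line the same path is selected with the new flag, for example "python run_agent.py --use_serverless ..."; without the flag, run_agent.py keeps the existing DrumServerRun-based execute_drum path. The revised tracing setup also prefers DATAROBOT_OTEL_COLLECTOR_BASE_URL when it is set and only falls back to deriving the "otel" endpoint from DATAROBOT_ENDPOINT.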