Skip to content

Commit

Permalink
Renovate: add PRs for older R/Python versions
Browse files Browse the repository at this point in the history
  • Loading branch information
Gaspi committed Dec 6, 2024
1 parent f0c3029 commit 232d706
Show file tree
Hide file tree
Showing 2 changed files with 57 additions and 16 deletions.
71 changes: 56 additions & 15 deletions renovate.json
Original file line number Diff line number Diff line change
@@ -1,48 +1,89 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:base"
"config:recommended"
],
"regexManagers": [
"customManagers": [
{
"customType": "regex",
"fileMatch": ["r-minimal/Dockerfile"],
"matchStrings": ["ARG R_VERSION=\"(?<currentValue>.*?)\""],
"depNameTemplate": "rocker-org/rocker-versioned2",
"datasourceTemplate": "github-releases",
"extractVersionTemplate": "^R(?<version>.*)$"
"extractVersionTemplate": "^R(?<version>.*?)$"
},
{
"fileMatch": [".github/workflows/main-workflow-template.yml"],
"matchStrings": ["r_version_1: (?<currentValue>.*?)"],
"customType": "regex",
"fileMatch": [".github/workflows/main-workflow.yml"],
"matchStrings": ["(^| )r_version_1: *(?<currentValue>[^\\s]*)"],
"depNameTemplate": "rocker-org/rocker-versioned2",
"datasourceTemplate": "github-releases",
"extractVersionTemplate": "^R(?<version>.*)$"
"extractVersionTemplate": "^R(?<version>.*?)$"
},
{
"customType": "regex",
"fileMatch": [".github/workflows/main-workflow.yml"],
"matchStrings": ["(^| )r_version_2: *(?<currentValue>[^\\s]*)"],
"depNameTemplate": "rocker-org/rocker-versioned2",
"datasourceTemplate": "github-releases",
"extractVersionTemplate": "^R(?<version>.*?)$"
},
{
"customType": "regex",
"fileMatch": ["python-minimal/Dockerfile"],
"matchStrings": ["ARG PYTHON_VERSION=\"(?<currentValue>.*?)\""],
"depNameTemplate": "conda-forge/python",
"datasourceTemplate": "conda"
},
{
"fileMatch": [".github/workflows/main-workflow-template.yml"],
"matchStrings": ["python_version_1: (?<currentValue>.*?)"],
"customType": "regex",
"fileMatch": [".github/workflows/main-workflow.yml"],
"matchStrings": ["(^| )python_version_1: *(?<currentValue>[^\\s]*)"],
"depNameTemplate": "conda-forge/python",
"datasourceTemplate": "conda"
},
{
"fileMatch": ["spark/Dockerfile"],
"customType": "regex",
"fileMatch": [".github/workflows/main-workflow.yml"],
"matchStrings": ["(^| )python_version_2: *(?<currentValue>[^\\s]*)"],
"depNameTemplate": "conda-forge/python",
"datasourceTemplate": "conda"
},
{
"customType": "regex",
"fileMatch": ["spark/Dockerfile$"],
"matchStrings": ["ARG SPARK_VERSION=\"(?<currentValue>.*?)\""],
"depNameTemplate": "apache/spark",
"datasourceTemplate": "github-releases",
"extractVersionTemplate": "^v(?<version>.*)$"
"datasourceTemplate": "github-tags",
"extractVersionTemplate": "^v(?<version>[^\\s]*)"
},
{
"fileMatch": [".github/workflows/main-workflow-template.yml"],
"matchStrings": ["spark_version: (?<currentValue>.*?)"],
"customType": "regex",
"fileMatch": [".github/workflows/main-workflow.yml"],
"matchStrings": ["(^| )spark_version: *(?<currentValue>[^\\s]*)"],
"depNameTemplate": "apache/spark",
"datasourceTemplate": "github-releases",
"extractVersionTemplate": "^v(?<version>.*)$"
"datasourceTemplate": "github-tags",
"extractVersionTemplate": "v(?<version>[^\\s]*)"
}
],
"packageRules": [
{
"matchManagers": ["regex"],
"matchJsonata": ["$contains(replaceString, '_version_2')"],
"matchUpdateTypes": ["major", "minor"],
"enabled": false
},
{
"matchManagers": ["regex"],
"matchJsonata": ["$contains(replaceString, 'python_version_2')"],
"separateMinorPatch": true,
"groupName": "Python backup version update"
},
{
"matchManagers": ["regex"],
"matchJsonata": ["$contains(replaceString, 'r_version_2')"],
"separateMinorPatch": true,
"groupName": "R backup version update"
}
]
}
2 changes: 1 addition & 1 deletion spark/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ RUN --mount=type=secret,id=github_token \
apt-get update && \
# Install JDK
/opt/install-java.sh && \
# Install Spark/Hadoop/Hive
# Install Spark/Hadoop/Hive
/opt/install-spark-hadoop-hive.sh && \
# Put Spark config in the right place
cp /opt/spark-env.sh $SPARK_HOME/conf && \
Expand Down

0 comments on commit 232d706

Please sign in to comment.