Jenkinsfile: 307 changes (144 additions, 163 deletions)
@@ -13,9 +13,8 @@ pipeline {
AZURE_IMAGE = "e6labs.azurecr.io/${RELEASE_NAME}"

AWS_REGION = "us-east-1"
ASSUMED_ROLE_ARN ="arn:aws:iam::670514002493:role/cross-account-jenkins-access"
ASSUMED_ROLE_ARN = "arn:aws:iam::670514002493:role/cross-account-jenkins-access"
DEV_IMAGE = "670514002493.dkr.ecr.us-east-1.amazonaws.com/${RELEASE_NAME}:latest"

}

options {
@@ -30,186 +29,168 @@ pipeline {
}

stages {
stage('Sonarqube Scan') {
agent {
stage('Sonarqube Scan') {
agent {
kubernetes {
inheritFrom 'docker'
defaultContainer 'docker'
}
}

environment {
SCANNER_HOME = tool 'sonarqube'
environment {
SCANNER_HOME = tool 'sonarqube'
}

steps {
withSonarQubeEnv('sonarqube-jenkins') {
checkout scm
sh '${SCANNER_HOME}/bin/sonar-scanner'
steps {
withSonarQubeEnv('sonarqube-jenkins') {
checkout scm
sh '${SCANNER_HOME}/bin/sonar-scanner'
}
}
}
}
}

// stage("Quality Gate") {
// steps {
// timeout(time: 1, unit: 'HOURS') {
// waitForQualityGate abortPipeline: true
// }
// }
// }
// stage("Quality Gate") {
// steps {
// timeout(time: 1, unit: 'HOURS') {
// waitForQualityGate abortPipeline: true
// }
// }
// }

// stage('Trivy Scan') {
// agent {
// kubernetes {
// inheritFrom 'pythondarm'
// defaultContainer 'python'
// }
// }

// steps {
// checkout scm
// sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin ${TRIVY_VERSION}'
// sh 'mkdir -p /tmp/trivy/'
// sh 'aws s3 cp s3://e6-trivy-db/db.tar.gz /tmp/trivy/'
// sh 'tar -xvf /tmp/trivy/db.tar.gz'
// script {
// def trivyResult = sh(
// script: "trivy fs --exit-code 1 --cache-dir='/tmp/trivy' ${TRIVY_OPTIONS} --no-progress --scanners vuln,misconfig,secret .",
// returnStatus: true
// )
// if (trivyResult == 0) {
// // Trivy scan passed, push the image
// sh 'echo "Trivy scan passed for Python Scripts."'
// }
// else {
// error('Trivy scan failed')
// }
// }
// }
// }

stage('Cloud Authentication') {
agent {
kubernetes {
inheritFrom 'cloud'
defaultContainer 'cloud'
}
}

stage ('Trivy Scan') {
agent {
kubernetes {
inheritFrom 'pythondarm'
defaultContainer 'python'
environment {
GCP_SA_PATH = credentials('JENKINS_GCP_SA')
}

steps {
checkout scm
sh 'git config --global --add safe.directory "*"'
sh 'cp ${GCP_SA_PATH} hello.json && gcloud auth activate-service-account --key-file=hello.json'
sh 'gcloud config set project ${GCP_PROJECT_ID}'
sh 'gcloud auth configure-docker us-central1-docker.pkg.dev'
script {
env.GIT_COMMIT_HASH = sh(script: 'git rev-parse --short HEAD', returnStdout: true)
env.TAG_VALUE = "${IMAGE_TAG_PREFIX}${GIT_COMMIT_HASH}"
env.GCP_DOCKER_TOKEN = sh(returnStdout: true, script: "gcloud auth print-access-token").trim()
env.TEMP_ROLE = sh(returnStdout: true, script: 'aws sts assume-role --role-arn ${ASSUMED_ROLE_ARN} --role-session-name storage-service-${BUILD_NUMBER}').trim()
env.AWS_ACCESS_KEY_ID = sh(returnStdout: true, script: 'echo $TEMP_ROLE | jq -r ".Credentials.AccessKeyId"').trim()
env.AWS_SECRET_ACCESS_KEY = sh(returnStdout: true, script: 'echo $TEMP_ROLE | jq -r ".Credentials.SecretAccessKey"').trim()
env.AWS_SESSION_TOKEN = sh(returnStdout: true, script: 'echo $TEMP_ROLE | jq -r ".Credentials.SessionToken"').trim()
env.ECR_TOKEN = sh(returnStdout: true, script: "aws ecr get-login-password --region ${AWS_REGION} --output text").trim()
}
}
}
}

steps {
checkout scm
sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin ${TRIVY_VERSION}'
sh 'mkdir -p /tmp/trivy/'
sh 'aws s3 cp s3://e6-trivy-db/db.tar.gz /tmp/trivy/'
sh 'tar -xvf /tmp/trivy/db.tar.gz'
script {
def trivyResult = sh(
script: "trivy fs --exit-code 1 --cache-dir='/tmp/trivy' ${TRIVY_OPTIONS} --no-progress --scanners vuln,misconfig,secret .",
returnStatus: true
)
if (trivyResult == 0) {
// Trivy scan passed, push the image
sh 'echo "Trivy scan passed for Python Scripts."'
}
else {
error('Trivy scan failed')
}
}
}
}

stage ('Cloud Authentication') {
agent {
kubernetes {
inheritFrom 'cloud'
defaultContainer 'cloud'
}
}

environment {
GCP_SA_PATH = credentials('JENKINS_GCP_SA')
}

steps {
checkout scm
sh 'git config --global --add safe.directory "*"'
sh 'cp ${GCP_SA_PATH} hello.json && gcloud auth activate-service-account --key-file=hello.json'
sh 'gcloud config set project ${GCP_PROJECT_ID}'
sh 'gcloud auth configure-docker us-central1-docker.pkg.dev'
script {
env.GIT_COMMIT_HASH = sh (script: 'git rev-parse --short HEAD', returnStdout: true)
env.TAG_VALUE = "${IMAGE_TAG_PREFIX}${GIT_COMMIT_HASH}"
env.GCP_DOCKER_TOKEN=sh(returnStdout: true, script: "gcloud auth print-access-token").trim()
env.TEMP_ROLE=sh(returnStdout: true, script: 'aws sts assume-role --role-arn ${ASSUMED_ROLE_ARN} --role-session-name storage-service-${BUILD_NUMBER}').trim()
env.AWS_ACCESS_KEY_ID=sh(returnStdout: true, script: 'echo $TEMP_ROLE | jq -r ".Credentials.AccessKeyId"').trim()
env.AWS_SECRET_ACCESS_KEY=sh(returnStdout: true, script: 'echo $TEMP_ROLE | jq -r ".Credentials.SecretAccessKey"').trim()
env.AWS_SESSION_TOKEN=sh(returnStdout: true, script: 'echo $TEMP_ROLE | jq -r ".Credentials.SessionToken"').trim()
env.ECR_TOKEN=sh(returnStdout: true, script: "aws ecr get-login-password --region ${AWS_REGION} --output text").trim()
}
}
stage('Production builds') {
parallel {
stage('ARM builds') {
agent {
kubernetes {
inheritFrom 'docker'
defaultContainer 'docker'
}
}

steps {
checkout scm
sh 'docker login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'docker buildx create --name mybuilder --use --platform linux/arm64,linux/amd64'
// sh 'mkdir -p /tmp/trivy/'
// sh 'aws s3 cp s3://e6-trivy-db/db.tar.gz /tmp/trivy/'
// sh 'tar -xvf /tmp/trivy/db.tar.gz'
sh 'docker buildx build --no-cache --platform linux/arm64 -t ${RELEASE_NAME} --load .'
sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin ${TRIVY_VERSION}'
script {
// Trivy scan passed, push the image
sh 'docker buildx build --platform linux/arm64 -t ${PROD_IMAGE} --push --output=type=image,push-by-digest=true --metadata-file meta-arm64.json .'
env.ARM_HASH = sh(returnStdout: true, script: "cat meta-arm64.json | jq -r '.\"containerimage.digest\"'").trim()
}
}
}

stage('Production builds') {
parallel {
stage('ARM builds') {
agent {
kubernetes {
inheritFrom 'docker'
defaultContainer 'docker'
}
}

steps {
checkout scm
sh 'docker login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'docker buildx create --name mybuilder --use --platform linux/arm64,linux/amd64'
sh 'mkdir -p /tmp/trivy/'
sh 'aws s3 cp s3://e6-trivy-db/db.tar.gz /tmp/trivy/'
sh 'tar -xvf /tmp/trivy/db.tar.gz'
sh 'docker buildx build --no-cache --platform linux/arm64 -t ${RELEASE_NAME} --load .'
sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin ${TRIVY_VERSION}'
script {
def trivyResult = sh(
script: "trivy image --exit-code 1 --cache-dir='/tmp/trivy' ${TRIVY_OPTIONS} --no-progress --scanners vuln,misconfig,secret ${RELEASE_NAME}",
returnStatus: true
)
if (trivyResult == 0) {
// Trivy scan passed, push the image
sh 'docker buildx build --platform linux/arm64 -t ${PROD_IMAGE} --push --output=type=image,push-by-digest=true --metadata-file meta-arm64.json .'
env.ARM_HASH = sh(returnStdout: true, script: "cat meta-arm64.json | jq -r '.\"containerimage.digest\"'").trim()
}
else {
error('Trivy scan failed for Docker image. Image will not be pushed.')
stage('AMD builds') {
agent {
kubernetes {
inheritFrom 'docker-amd'
defaultContainer 'docker'
}
}

steps {
checkout scm
sh 'docker login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'docker buildx create --name mybuilder --use --platform linux/arm64,linux/amd64'
// sh 'mkdir -p /tmp/trivy/'
// sh 'aws s3 cp s3://e6-trivy-db/db.tar.gz /tmp/trivy/'
// sh 'tar -xvf /tmp/trivy/db.tar.gz'
sh 'docker buildx build --no-cache --platform linux/amd64 -t ${RELEASE_NAME} --load .'
sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin ${TRIVY_VERSION}'
script {
// Trivy scan passed, push the image
sh 'docker buildx build --platform linux/amd64 -t ${PROD_IMAGE} --push --output=type=image,push-by-digest=true --metadata-file meta-amd64.json .'
env.AMD_HASH = sh(returnStdout: true, script: "cat meta-amd64.json | jq -r '.\"containerimage.digest\"'").trim()
}
}
}
}
}
}
}

stage('AMD builds') {
agent {
kubernetes {
inheritFrom 'docker-amd'
defaultContainer 'docker'
}
}

steps {
checkout scm
sh 'docker login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'docker buildx create --name mybuilder --use --platform linux/arm64,linux/amd64'
sh 'mkdir -p /tmp/trivy/'
sh 'aws s3 cp s3://e6-trivy-db/db.tar.gz /tmp/trivy/'
sh 'tar -xvf /tmp/trivy/db.tar.gz'
sh 'docker buildx build --no-cache --platform linux/amd64 -t ${RELEASE_NAME} --load .'
sh 'curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin ${TRIVY_VERSION}'
script {
def trivyResult = sh(
script: "trivy image --exit-code 1 --cache-dir='/tmp/trivy' ${TRIVY_OPTIONS} --no-progress --scanners vuln,misconfig,secret ${RELEASE_NAME}",
returnStatus: true
)
if (trivyResult == 0) {
// Trivy scan passed, push the image
sh 'docker buildx build --platform linux/amd64 -t ${PROD_IMAGE} --push --output=type=image,push-by-digest=true --metadata-file meta-amd64.json .'
env.AMD_HASH = sh(returnStdout: true, script: "cat meta-amd64.json | jq -r '.\"containerimage.digest\"'").trim()
}
else {
error('Trivy scan failed for Docker image. Image will not be pushed.')
stage('Prod artifacts- push') {
agent {
kubernetes {
inheritFrom 'docker'
defaultContainer 'docker'
}
}
}
}
}
}
}

stage('Prod artifacts- push') {
agent {
kubernetes {
inheritFrom 'docker'
defaultContainer 'docker'
steps {
sh 'docker login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'docker buildx imagetools create --tag ${PROD_IMAGE}:${TAG_VALUE} ${AMD_HASH} ${ARM_HASH}'
sh 'apk add skopeo'
sh 'skopeo login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'skopeo login --username AWS --password ${ECR_TOKEN} 670514002493.dkr.ecr.us-east-1.amazonaws.com'
sh 'skopeo login --username e6data-ci --password ${ACR_TOKEN} e6labs.azurecr.io'
sh 'skopeo copy docker://${PROD_IMAGE}:${TAG_VALUE} docker://${DEV_IMAGE}'
sh 'skopeo copy docker://${PROD_IMAGE}:${TAG_VALUE} docker://670514002493.dkr.ecr.us-east-1.amazonaws.com/${RELEASE_NAME}:${TAG_VALUE}'
sh 'skopeo copy docker://${PROD_IMAGE}:${TAG_VALUE} docker://${AZURE_IMAGE}:${TAG_VALUE}'
}
}
}

steps {
sh 'docker login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'docker buildx imagetools create --tag ${PROD_IMAGE}:${TAG_VALUE} ${AMD_HASH} ${ARM_HASH}'
sh 'apk add skopeo'
sh 'skopeo login -u oauth2accesstoken -p ${GCP_DOCKER_TOKEN} https://us-docker.pkg.dev'
sh 'skopeo login --username AWS --password ${ECR_TOKEN} 670514002493.dkr.ecr.us-east-1.amazonaws.com'
sh 'skopeo login --username e6data-ci --password ${ACR_TOKEN} e6labs.azurecr.io'
sh 'skopeo copy docker://${PROD_IMAGE}:${TAG_VALUE} docker://${DEV_IMAGE}'
sh 'skopeo copy docker://${PROD_IMAGE}:${TAG_VALUE} docker://670514002493.dkr.ecr.us-east-1.amazonaws.com/${RELEASE_NAME}:${TAG_VALUE}'
sh 'skopeo copy docker://${PROD_IMAGE}:${TAG_VALUE} docker://${AZURE_IMAGE}:${TAG_VALUE}'
}
}
}
}
}
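The Cloud Authentication stage above shells out to aws sts assume-role and pulls each credential field out of the CLI's JSON response with jq before exporting it to later stages. As a point of reference only (not part of this PR), the short Python sketch below does the equivalent with boto3; the role ARN is copied from the pipeline environment, while the session name and the environment-variable export are illustrative assumptions.

    # Sketch of what the Cloud Authentication stage does via `aws sts assume-role | jq`:
    # assume the cross-account role and expose the temporary credentials.
    # Assumes boto3 is installed and base AWS credentials are already configured.
    import os
    import boto3

    ASSUMED_ROLE_ARN = "arn:aws:iam::670514002493:role/cross-account-jenkins-access"

    sts = boto3.client("sts")
    resp = sts.assume_role(
        RoleArn=ASSUMED_ROLE_ARN,
        RoleSessionName="storage-service-local",  # the pipeline uses storage-service-${BUILD_NUMBER}
    )

    creds = resp["Credentials"]
    # These are the same three fields the jq filters extract from the CLI output.
    os.environ["AWS_ACCESS_KEY_ID"] = creds["AccessKeyId"]
    os.environ["AWS_SECRET_ACCESS_KEY"] = creds["SecretAccessKey"]
    os.environ["AWS_SESSION_TOKEN"] = creds["SessionToken"]

With those variables set, aws ecr get-login-password --region us-east-1 yields the registry token that the final stage hands to skopeo.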
requirements.txt: 2 changes (1 addition, 1 deletion)
@@ -9,7 +9,7 @@ fastapi==0.115.6
gitdb==4.0.11
GitPython==3.1.43
idna==3.7
Jinja2==3.1.5
Jinja2==3.1.6
jsonschema==4.23.0
jsonschema-specifications==2023.12.1
markdown-it-py==3.0.0
sqlglot/dialects/e6.py: 2 changes (1 addition, 1 deletion)
@@ -1892,7 +1892,7 @@ def struct_sql(self, expression: exp.Struct) -> str:
keys.append(exp.Literal.string(f"_{i}"))
values.append(e)

return self.func("OBJECT_CONSTRUCT", *flatten(zip(keys, values)))
return self.func("named_struct", *flatten(zip(keys, values)))

def neq_sql(self, expression: exp.NEQ) -> str:
return self.binary(expression, "!=")
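The single-line change in struct_sql above switches the E6 generator from Snowflake-style OBJECT_CONSTRUCT to named_struct when rendering exp.Struct nodes. A quick way to see the effect, assuming this fork registers the dialect with sqlglot under the name "e6" (as the tests below imply), is a small transpile call; the expected output mirrors the new test case added in tests/dialects/test_e6.py.

    # Sketch only: assumes this repository's e6 dialect is importable and registered.
    import sqlglot

    out = sqlglot.transpile(
        "struct(x_start as x, y_start as y)",
        read="databricks",
        write="e6",
    )[0]
    print(out)  # per the new test case: named_struct('x', x_start, 'y', y_start)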
tests/dialects/test_e6.py: 33 changes (20 additions, 13 deletions)
@@ -536,19 +536,26 @@ def test_group_concat(self):
)

def test_named_struct(self):
self.validate_identity("SELECT NAMED_STRUCT('key_1', 'one', 'key_2', NULL)")

self.validate_all(
"NAMED_STRUCT('key_1', 'one', 'key_2', NULL)",
read={
"bigquery": "JSON_OBJECT(['key_1', 'key_2'], ['one', NULL])",
"duckdb": "JSON_OBJECT('key_1', 'one', 'key_2', NULL)",
},
write={
"bigquery": "JSON_OBJECT('key_1', 'one', 'key_2', NULL)",
"duckdb": "JSON_OBJECT('key_1', 'one', 'key_2', NULL)",
"snowflake": "OBJECT_CONSTRUCT_KEEP_NULL('key_1', 'one', 'key_2', NULL)",
},
# self.validate_identity("SELECT NAMED_STRUCT('key_1', 'one', 'key_2', NULL)")
#
# self.validate_all(
# "NAMED_STRUCT('key_1', 'one', 'key_2', NULL)",
# read={
# "bigquery": "JSON_OBJECT(['key_1', 'key_2'], ['one', NULL])",
# "duckdb": "JSON_OBJECT('key_1', 'one', 'key_2', NULL)",
# },
# write={
# "bigquery": "JSON_OBJECT('key_1', 'one', 'key_2', NULL)",
# "duckdb": "JSON_OBJECT('key_1', 'one', 'key_2', NULL)",
# "snowflake": "OBJECT_CONSTRUCT_KEEP_NULL('key_1', 'one', 'key_2', NULL)",
# },
# )

self.validate_all(
"named_struct('x', x_start, 'y', y_start)",
read={
"databricks": "struct (x_start as x, y_start as y)"
}
)

def test_json_extract(self):