Commit f929add

fetch additional chart dependencies and install via helm

code-crusher committed Nov 14, 2024
1 parent cd0032f commit f929add

Showing 2 changed files with 57 additions and 36 deletions.
18 changes: 12 additions & 6 deletions deployment.yaml
@@ -21,7 +21,7 @@ spec:
          [
            "sh",
            "-c",
-           "until nc -z $POSTGRES_HOST ${POSTGRES_PORT:-5432} && nc -z $REDIS_HOST ${REDIS_PORT:-6379}; do echo waiting for postgres and redis; if ! nc -z $POSTGRES_HOST ${POSTGRES_PORT:-5432}; then echo postgres not ready; fi; if ! nc -z $REDIS_HOST ${REDIS_PORT:-6379}; then echo redis not ready; fi; sleep 2; done;",
+           "until nc -z $POSTGRES_HOST ${POSTGRES_PORT} && nc -z $REDIS_HOST ${REDIS_PORT}; do echo waiting for postgres and redis; if ! nc -z $POSTGRES_HOST ${POSTGRES_PORT:-5432}; then echo postgres not ready; fi; if ! nc -z $REDIS_HOST ${REDIS_PORT:-6379}; then echo redis not ready; fi; sleep 2; done;",
          ]
        env:
          - name: POSTGRES_HOST
@@ -234,7 +234,7 @@ apiVersion: apps/v1
kind: Deployment
metadata:
  name: redis-gravity
-  namespace:
+  namespace: ${NAMESPACE}
spec:
  replicas: 1
  selector:
@@ -347,7 +347,13 @@ metadata:
  name: gravity-job-agent-role
rules:
  - apiGroups: [""]
-    resources: ["pods", "pods/log", "pods/exec", "services", "secrets", "configmaps", "persistentvolumeclaims"]
+    resources: ["pods", "pods/log", "pods/exec", "services", "secrets", "configmaps", "persistentvolumeclaims", "serviceaccounts"]
    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
+  - apiGroups: [""]
+    resources: ["secrets"]
+    verbs: ["get", "list", "watch"]
+  - apiGroups: [""]
+    resources: ["namespaces"]
+    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
  - apiGroups: ["apps"]
    resources: ["deployments", "statefulsets", "replicasets"]
@@ -356,11 +362,11 @@ rules:
    resources: ["jobs"]
    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
  - apiGroups: ["networking.k8s.io"]
-    resources: ["ingresses"]
+    resources: ["ingresses", "networkpolicies"]
    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
  - apiGroups: ["rbac.authorization.k8s.io"]
    resources: ["roles", "rolebindings"]
    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
  - apiGroups: ["policy"]
-    resources: ["podsecuritypolicies"]
-    verbs: ["use"]
+    resources: ["podsecuritypolicies", "poddisruptionbudgets"]
+    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
75 changes: 45 additions & 30 deletions src/index.ts
@@ -19,6 +19,11 @@ interface ServiceChange {
  lastCommitSha?: string
}

+interface PipelineCharts {
+  charts: any[],
+  branch: string
+}
+
interface DeployRun {
  id: string;
  name: string;
@@ -630,42 +635,12 @@ const processJob = async () => {
  const serviceContext = path.join(gitRepoPath, service.servicePath)
  const dockerfilePath = path.join(serviceContext, 'Dockerfile')

- // let dockerBuildCommand = ""
-
- // let cacheExists = false
- // const imageCacheBasePath = process.env.ENV === "production" ? "/image-cache" : "./image-cache"
- // try {
- // await customExec(deploymentRunId, "DOCKER_IMAGE_BUILD", serviceName, `ls ${imageCacheBasePath}/${owner}-${serviceName}-latest.tar`)
- // cacheExists = true
- // } catch (error) {
- // syncLogsToGravityViaWebsocket(deploymentRunId, "DOCKER_IMAGE_BUILD", serviceName, `No cache found for ${owner}/${serviceName}:latest, proceeding with fresh build`)
- // }
-
- // if (cacheExists) {
- // syncLogsToGravityViaWebsocket(deploymentRunId, "DOCKER_IMAGE_BUILD", serviceName, `Cache found for ${owner}/${serviceName}:latest, using it for build`)
- // if (process.env.ENV === "developement") {
- // await customExec(deploymentRunId, "DOCKER_CACHE_LOAD", serviceName, `${dockerBuildCli} load -i ${imageCacheBasePath}/${owner}-${serviceName}-latest.tar`)
- // } else {
- // dockerBuildCommand = `${dockerBuildCli} ${process.env.ENV === "production" ? "bud --isolation chroot" : "build"} --platform=linux/amd64 -t ${owner}/${serviceName}:latest -f ${dockerfilePath} ${serviceContext} ${process.env.ENV === "production" ? `--cache-from=type=tar,dest=${imageCacheBasePath}/${owner}-${serviceName}-latest.tar` : `--cache-from ${owner}/${serviceName}:latest`}`
- // }
- // } else {
- // if (process.env.ENV === "developement") {
- // dockerBuildCommand = `${dockerBuildCli} build --platform=linux/amd64 -t ${owner}/${serviceName}:latest -f ${dockerfilePath} ${serviceContext}`
- // } else {
- // dockerBuildCommand = `${dockerBuildCli} ${process.env.ENV === "production" ? "bud --isolation chroot" : "build"} --platform=linux/amd64 -t ${owner}/${serviceName}:latest -f ${dockerfilePath} ${serviceContext} --cache-to=type=tar,dest=${imageCacheBasePath}/${owner}-${serviceName}-latest.tar`
- // }
- // }
-
  const dockerBuildCommand = `${dockerBuildCli} ${process.env.ENV === "production" ? "bud --isolation chroot" : "build"} --platform=linux/amd64 -t ${owner}/${serviceName}:latest -f ${dockerfilePath} ${serviceContext}`

  await customExec(deploymentRunId, "DOCKER_IMAGE_BUILD", serviceName, dockerBuildCommand)

  sendSlackNotification("Docker Build Completed", `Docker build completed for ${serviceName} / ${lastRunBranch} in ${repository}`)

- // if (process.env.ENV === "developement") {
- // await customExec(deploymentRunId, "DOCKER_IMAGE_CACHE", serviceName, `${dockerBuildCli} save -o ${imageCacheBasePath}/${owner}-${serviceName}-latest.tar ${owner}/${serviceName}:latest`)
- // }
-
  // Continue with existing AWS deployment logic
  const newValuesFiles: string[] = []

@@ -893,6 +868,46 @@ const processJob = async () => {
    [JSON.stringify(newValuesFiles), "COMPLETED", deploymentRunId]
  )

+ const matchedAllowedRegex = process.env.GIT_BRANCHES_ALLOWED?.split(',').find(allowedBranch => {
[GitHub Actions check failure on line 871 (Build and Deploy): Not all code paths return a value.]
+     // Check for exact match first
+     if (allowedBranch === lastRunBranch) {
+         return allowedBranch;
+     }
+     // Check for wildcard pattern match
+     if (allowedBranch.endsWith('.*')) {
+         const prefix = allowedBranch.slice(0, -2).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+         const pattern = new RegExp(`^${prefix}.*$`);
+         if (pattern.test(lastRunBranch)) {
+             return allowedBranch;
+         }
+     }
+ });


+ if (matchedAllowedRegex && process.env.GRAVITY_API_URL) {
+     syncLogsToGravityViaWebsocket(deploymentRunId, "CHART_DEPENDENCIES", serviceName, `Fetching chart dependecies for branch: ${matchedAllowedRegex}`, false)
+     const pipelineCharts: PipelineCharts = await axios.post(`${process.env.GRAVITY_API_URL}/api/v1/pipeline-charts`, {
+         awsAccountId: process.env.AWS_ACCOUNT_ID,
+         env: process.env.ENV,
+         branch: lastRunBranch
+     })
+
+     syncLogsToGravityViaWebsocket(deploymentRunId, "CHART_DEPENDENCIES", serviceName, `Found following charts: ${JSON.stringify(pipelineCharts?.charts)}`, false)
+     if (pipelineCharts?.charts?.length > 0) {
+         // save the new values file locally and pass in helm command
+         const tempDir = os.tmpdir()
+         const valuesFilePath = path.join(tempDir, `${serviceName}-values-${lastRunBranch}.yaml`)
+         fs.writeFileSync(valuesFilePath, JSON.stringify(newValuesFiles))
+
+         syncLogsToGravityViaWebsocket(deploymentRunId, "CHART_DEPLOYMENT", serviceName, `Deploying charts: ${JSON.stringify(pipelineCharts?.charts)}`, false)
+         await Promise.all(pipelineCharts?.charts?.map(async (chart: any) => {
+             syncLogsToGravityViaWebsocket(deploymentRunId, "CHART_DEPLOYMENT", serviceName, `Deploying chart: ${JSON.stringify(chart)}`, false)
+             const helmChartInstallCommand = `helm upgrade --install ${chart.name} ${chart.repository} --namespace ${lastRunBranch} --version ${chart.version} -f ${valuesFilePath}`
+             await customExec(deploymentRunId, "CHART_DEPLOYMENT", serviceName, helmChartInstallCommand, true)
+         }))
+     }
+ }

syncLogsToGravityViaWebsocket(deploymentRunId, "PIPELINE_COMPLETED", serviceName, JSON.stringify({ newValuesFiles }), false)

// Cleanup
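The Build and Deploy check failure noted above ("Not all code paths return a value", line 871) points at the new `find` callback that matches `lastRunBranch` against `GIT_BRANCHES_ALLOWED`: it returns `allowedBranch` on a match but falls through without an explicit return otherwise, which trips TypeScript's noImplicitReturns check. A minimal sketch of one way to satisfy it, assuming the same inputs as the diff (the helper name is hypothetical), is to make the predicate always return a boolean:

// Hypothetical helper, not part of the commit: a boolean-returning predicate
// keeps Array.find happy under noImplicitReturns-style checks.
const findAllowedBranch = (allowedBranches: string, lastRunBranch: string): string | undefined =>
    allowedBranches.split(',').find((allowedBranch): boolean => {
        // Exact match
        if (allowedBranch === lastRunBranch) {
            return true;
        }
        // Wildcard pattern match, e.g. "release/.*"
        if (allowedBranch.endsWith('.*')) {
            const prefix = allowedBranch.slice(0, -2).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
            return new RegExp(`^${prefix}.*$`).test(lastRunBranch);
        }
        return false;
    });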

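One note on the chart fetch in the hunk above: `axios.post` resolves to an `AxiosResponse`, so typing the awaited value as `PipelineCharts` directly leaves the chart list on `response.data` rather than on `pipelineCharts.charts`. A sketch under that assumption, reusing the `PipelineCharts` shape and environment variables from the diff (the helper name is hypothetical):

import axios from 'axios';

interface PipelineCharts {
    charts: any[],
    branch: string
}

// Hypothetical helper, not part of the commit: unwrap response.data so that
// pipelineCharts.charts is populated before the helm deployment loop runs.
const fetchPipelineCharts = async (branch: string): Promise<PipelineCharts> => {
    const response = await axios.post<PipelineCharts>(
        `${process.env.GRAVITY_API_URL}/api/v1/pipeline-charts`,
        {
            awsAccountId: process.env.AWS_ACCOUNT_ID,
            env: process.env.ENV,
            branch,
        }
    );
    return response.data;
};

Separately, since `newValuesFiles` is declared as `string[]`, `JSON.stringify(newValuesFiles)` writes a bare JSON array into the `-f` values file, which may not be the merged values the charts expect.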