# Workflow file captured from GitHub Actions run #37:
# "Testing changes in final step update kubernetes deployment"
# Builds/pushes the app image to Docker Hub (push steps currently disabled)
# and deploys k8s/deployment.yaml to an EKS cluster using a kubeconfig
# supplied as a base64-encoded secret.
name: Build and Deploy to Docker Hub

on:
  push:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      # Kept for the OIDC-based configure-aws-credentials step below
      # (currently commented out); harmless while static keys are in use.
      id-token: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      # - name: Set up Docker Build
      #   uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      # - name: Build, tag, and push image to Docker Hub
      #   env:
      #     DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
      #     DOCKER_HUB_REPOSITORY: ${{ secrets.DOCKER_HUB_REPOSITORY }}
      #   run: |
      #     IMAGE_TAG=latest
      #     REPOSITORY_URI=${DOCKER_HUB_USERNAME}/${DOCKER_HUB_REPOSITORY}
      #     # Build the Docker image
      #     docker build -t $REPOSITORY_URI:$IMAGE_TAG .
      #     # Push the image to Docker Hub
      #     docker push $REPOSITORY_URI:$IMAGE_TAG

      # OIDC alternative to the static-key step below — preferred long-term.
      # - name: Set up AWS CLI
      #   uses: aws-actions/configure-aws-credentials@v4
      #   with:
      #     role-to-assume: arn:aws:iam::246386308913:role/eks-federated-deployer
      #     role-session-name: eks-deployer-temporal-session
      #     aws-region: ${{ secrets.AWS_REGION }}

      - name: Configure AWS Credentials
        # Static-key configuration; quoted so empty/special-char secrets
        # don't word-split in the shell.
        run: |
          aws configure set aws_access_key_id "${{ secrets.AWS_ACCESS_KEY_ID }}"
          aws configure set aws_secret_access_key "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
          aws configure set default.region "${{ secrets.AWS_REGION }}"

      - name: Install kubectl
        run: |
          curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
          chmod +x kubectl
          sudo mv kubectl /usr/local/bin/

      - name: Decode and set kubeconfig
        env:
          KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG_DATA }}
        shell: bash
        run: |
          # The directory must exist before redirecting into it — the
          # standalone mkdir step was commented out, which broke this write.
          mkdir -p "$HOME/.kube"
          echo "$KUBE_CONFIG_DATA" | base64 --decode > "$HOME/.kube/config"
          # kubeconfig holds cluster credentials; restrict to the runner user.
          chmod 600 "$HOME/.kube/config"

      - name: Set KUBECONFIG environment variable
        run: echo "KUBECONFIG=$HOME/.kube/config" >> "$GITHUB_ENV"

      # Debugging/verification aids: confirm the AWS identity once
      # (a second identical get-caller-identity step was removed as a
      # duplicate) and show the checked-out workspace contents.
      - name: Validate AWS Configuration
        run: |
          which aws
          aws --version
          aws sts get-caller-identity

      - name: Print current working directory
        run: pwd

      - name: List files and directories in the current directory
        run: ls -R

      - name: Deploy to EKS
        # --validate=false skips client-side schema validation; the API
        # server still validates on apply.
        run: kubectl apply --validate=false -f k8s/deployment.yaml

      # - name: Validate kubeconfig and Test kubectl Configuration
      #   run: |
      #     # Print the kubeconfig file content
      #     cat $HOME/.kube/config
      #     # Test kubectl configuration
      #     kubectl config get-contexts
      #     kubectl config use-context arn:aws:eks:us-east-1:246386308913:cluster/eks-cluster
      #     kubectl cluster-info

      # - name: Update Kubernetes deployment
      #   env:
      #     # KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG_DATA }}
      #     DOCKER_IMAGE_URI: ${{ secrets.DOCKER_HUB_USERNAME }}/${{ secrets.DOCKER_HUB_REPOSITORY }}:latest
      #     K8S_DEPLOYMENT_NAME: ${{ secrets.K8S_DEPLOYMENT_NAME }}
      #     K8S_NAMESPACE: ${{ secrets.K8S_NAMESPACE }}
      #   run: |
      #     kubectl set image deployment/$K8S_DEPLOYMENT_NAME *=$DOCKER_IMAGE_URI -n $K8S_NAMESPACE