Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 0 additions & 47 deletions .github/workflows/deploy-application.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ on:
options:
- all
- web
- good-job
- sidekiq
default: all
workflow_call:
Expand Down Expand Up @@ -82,8 +81,6 @@ jobs:
"application=" + .codedeploy_application_name.value,
"application_group=" + .codedeploy_deployment_group_name.value,
"cluster_name=" + .ecs_variables.value.cluster_name,
"good_job_service=" + .ecs_variables.value.good_job.service_name,
"good_job_task_definition=" + .ecs_variables.value.good_job.task_definition.arn,
"sidekiq_service=" + .ecs_variables.value.sidekiq.service_name,
"sidekiq_task_definition=" + .ecs_variables.value.sidekiq.task_definition.arn
' > ${{ runner.temp }}/DEPLOYMENT_ENVS
Expand Down Expand Up @@ -126,50 +123,6 @@ jobs:
aws deploy wait deployment-successful --deployment-id "$deployment_id"
echo "Deployment successful"

create-good-job-deployment:
name: Create good-job deployment
runs-on: ubuntu-latest
needs: prepare-deployment
if: inputs.server_types == 'good-job' || inputs.server_types == 'all'
permissions:
id-token: write
steps:
- name: Download Artifact
uses: actions/download-artifact@v5
with:
name: DEPLOYMENT_ENVS-${{ inputs.environment }}
path: ${{ runner.temp }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v5
with:
role-to-assume: ${{ env.aws-role }}
aws-region: eu-west-2
- name: Trigger ECS Deployment
run: |
set -e
source ${{ runner.temp }}/DEPLOYMENT_ENVS
DEPLOYMENT_ID=$(aws ecs update-service --cluster $cluster_name --service $good_job_service \
--task-definition $good_job_task_definition --force-new-deployment \
--query 'service.deployments[?rolloutState==`IN_PROGRESS`].[id][0]' --output text)
echo "Deployment started: $DEPLOYMENT_ID"
echo "deployment_id=$DEPLOYMENT_ID" >> $GITHUB_ENV
- name: Wait for deployment to complete
run: |
set -e
source ${{ runner.temp }}/DEPLOYMENT_ENVS
DEPLOYMENT_STATE=IN_PROGRESS
while [ "$DEPLOYMENT_STATE" == "IN_PROGRESS" ]; do
echo "Waiting for deployment to complete..."
sleep 30
DEPLOYMENT_STATE="$(aws ecs describe-services --cluster $cluster_name --services $good_job_service \
--query "services[0].deployments[?id == \`$deployment_id\`].[rolloutState][0]" --output text)"
done
if [ "$DEPLOYMENT_STATE" != "COMPLETED" ]; then
echo "Deployment failed with state: $DEPLOYMENT_STATE"
exit 1
fi
echo "Deployment successful"

create-sidekiq-deployment:
name: Create sidekiq deployment
runs-on: ubuntu-latest
Expand Down
1 change: 0 additions & 1 deletion .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ on:
options:
- all
- web
- good-job
- sidekiq
- none
default: all
Expand Down
1 change: 0 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,5 @@ VOLUME ["/rails/db/data", "/rails/tmp", "/rails/log", "/tmp", "/var/log", "/var/
# Start web server by default, this can be overwritten by environment variable
EXPOSE 4000
ENV HTTP_PORT=4000
ENV GOOD_JOB_PROBE_PORT=4000
ENV SERVER_TYPE=web
CMD ["./bin/docker-start"]
1 change: 0 additions & 1 deletion Gemfile
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@ gem "fhir_models"
gem "flipper"
gem "flipper-active_record"
gem "flipper-ui"
gem "good_job"
gem "govuk-components"
gem "govuk_design_system_formbuilder"
gem "govuk_markdown"
Expand Down
12 changes: 2 additions & 10 deletions Gemfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ GEM
activemodel
erb (5.0.2)
erubi (1.13.1)
et-orbi (1.2.11)
et-orbi (1.3.0)
tzinfo
factory_bot (6.5.5)
activesupport (>= 6.1.0)
Expand Down Expand Up @@ -265,18 +265,11 @@ GEM
rack-protection (>= 1.5.3, < 5.0.0)
rack-session (>= 1.0.2, < 3.0.0)
sanitize (< 8)
fugit (1.11.1)
fugit (1.11.2)
et-orbi (~> 1, >= 1.2.11)
raabro (~> 1.4)
globalid (1.2.1)
activesupport (>= 6.1)
good_job (4.11.2)
activejob (>= 6.1.0)
activerecord (>= 6.1.0)
concurrent-ruby (>= 1.3.1)
fugit (>= 1.11.0)
railties (>= 6.1.0)
thor (>= 1.0.0)
govuk-components (5.11.3)
html-attributes-utils (~> 1.0.0, >= 1.0.0)
pagy (>= 6, < 10)
Expand Down Expand Up @@ -800,7 +793,6 @@ DEPENDENCIES
flipper
flipper-active_record
flipper-ui
good_job
govuk-components
govuk_design_system_formbuilder
govuk_markdown
Expand Down
5 changes: 1 addition & 4 deletions bin/docker-start
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,13 @@ BIN_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
if [ "$SERVER_TYPE" == "web" ]; then
echo "Starting web server..."
exec "$BIN_DIR"/thrust "$BIN_DIR"/rails server
elif [ "$SERVER_TYPE" == "good-job" ]; then
echo "Starting good-job server..."
exec "$BIN_DIR"/good_job start
elif [ "$SERVER_TYPE" == "sidekiq" ]; then
echo "Starting sidekiq server..."
exec "$BIN_DIR"/sidekiq
elif [ "$SERVER_TYPE" == "none" ]; then
echo "No server started"
exec tail -f /dev/null # Keep container running
else
echo "SERVER_TYPE variable: '$SERVER_TYPE' unknown. Allowed values ['web','good-job', 'none']"
echo "SERVER_TYPE variable: '$SERVER_TYPE' unknown. Allowed values: web, sidekiq, none"
exit 1
fi
5 changes: 0 additions & 5 deletions bin/good_job

This file was deleted.

1 change: 0 additions & 1 deletion config/application.rb
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@ class Application < Rails::Application
config.active_model.i18n_customize_full_message = true

config.active_job.queue_adapter = :sidekiq
config.good_job.execution_mode = :external

config.view_component.default_preview_layout = "component_preview"
config.view_component.previews.controller = "ComponentPreviewsController"
Expand Down
2 changes: 1 addition & 1 deletion config/database.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
default: &default
adapter: postgresql
encoding: unicode
pool: <%= ENV.fetch("RAILS_MAX_THREADS", 5).to_i + ENV.fetch("GOOD_JOB_MAX_THREADS", 4).to_i + ENV.fetch("SIDEKIQ_CONCURRENCY", 4).to_i %>
pool: <%= ENV.fetch("RAILS_MAX_THREADS", 5).to_i + ENV.fetch("SIDEKIQ_CONCURRENCY", 4).to_i %>

development:
<<: *default
Expand Down
3 changes: 0 additions & 3 deletions config/environments/development.rb
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,6 @@
# Prevent health checks from clogging up the logs.
config.silence_healthcheck_path = "/up"

# Set up GoodJob for async execution in development mode
config.good_job.execution_mode = :async

# Enable strict loading to catch N+1 problems.
config.active_record.strict_loading_by_default = true
config.active_record.strict_loading_mode = :n_plus_one_only
Expand Down
3 changes: 0 additions & 3 deletions config/environments/test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,6 @@
# Raise error when a before_action's only/except options reference missing actions.
config.action_controller.raise_on_missing_callback_actions = true

# Set up GoodJob for inline execution in test mode
config.good_job.execution_mode = :inline

# Enable strict loading to catch N+1 problems.
config.active_record.strict_loading_by_default = true
config.active_record.strict_loading_mode = :n_plus_one_only
Expand Down
14 changes: 0 additions & 14 deletions config/initializers/good_job.rb

This file was deleted.

17 changes: 0 additions & 17 deletions config/puma.rb
Original file line number Diff line number Diff line change
Expand Up @@ -57,22 +57,5 @@
# processes).
workers Settings.web_concurrency

if Settings.web_concurrency > 1 &&
Rails.configuration.good_job.execution_mode != :external
# Cleanly shut down GoodJob when Puma is shut down.
# See https://github.com/bensheldon/good_job#execute-jobs-async--in-process
MAIN_PID = Process.pid
before_fork { GoodJob.shutdown }
before_worker_boot { GoodJob.restart }
before_worker_shutdown { GoodJob.shutdown }
at_exit { GoodJob.shutdown if Process.pid == MAIN_PID }

# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory.
preload_app!
end

# Re-open appenders after forking the process; needed for Semantic Logger
before_worker_boot { SemanticLogger.reopen }
2 changes: 0 additions & 2 deletions config/routes.rb
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,6 @@

root to: redirect("/start")

mount GoodJob::Engine => "/good-job"

Sidekiq::Web.use Rack::Auth::Basic do |username, password|
ActiveSupport::SecurityUtils.secure_compare(
Rails.application.credentials.support_username,
Expand Down
11 changes: 11 additions & 0 deletions db/migrate/20250909100315_drop_good_job.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# frozen_string_literal: true

# Removes all database tables created by the GoodJob gem, now that background
# jobs run on Sidekiq (Redis/Valkey-backed) and these tables are unused.
class DropGoodJob < ActiveRecord::Migration[8.0]
  # Deliberately defines only `up` with no `down`: the dropped data is gone,
  # so rolling back this migration raises ActiveRecord::IrreversibleMigration.
  # Restoring GoodJob would require re-running the gem's install migrations.
  def up
    # NOTE(review): GoodJob's tables carry no inter-table FK constraints by
    # default, so drop order should not matter — confirm against the schema
    # if any were added manually.
    drop_table :good_jobs
    drop_table :good_job_batches
    drop_table :good_job_executions
    drop_table :good_job_processes
    drop_table :good_job_settings
  end
end
88 changes: 0 additions & 88 deletions db/schema.rb
Original file line number Diff line number Diff line change
Expand Up @@ -313,94 +313,6 @@
t.index ["session_date_id"], name: "index_gillick_assessments_on_session_date_id"
end

create_table "good_job_batches", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "description"
t.jsonb "serialized_properties"
t.text "on_finish"
t.text "on_success"
t.text "on_discard"
t.text "callback_queue_name"
t.integer "callback_priority"
t.datetime "enqueued_at"
t.datetime "discarded_at"
t.datetime "finished_at"
end

create_table "good_job_executions", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.uuid "active_job_id", null: false
t.text "job_class"
t.text "queue_name"
t.jsonb "serialized_params"
t.datetime "scheduled_at"
t.datetime "finished_at"
t.text "error"
t.integer "error_event", limit: 2
t.text "error_backtrace", array: true
t.uuid "process_id"
t.interval "duration"
t.index ["active_job_id", "created_at"], name: "index_good_job_executions_on_active_job_id_and_created_at"
t.index ["process_id", "created_at"], name: "index_good_job_executions_on_process_id_and_created_at"
end

create_table "good_job_processes", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.jsonb "state"
t.integer "lock_type", limit: 2
end

create_table "good_job_settings", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "key"
t.jsonb "value"
t.index ["key"], name: "index_good_job_settings_on_key", unique: true
end

create_table "good_jobs", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.text "queue_name"
t.integer "priority"
t.jsonb "serialized_params"
t.datetime "scheduled_at"
t.datetime "performed_at"
t.datetime "finished_at"
t.text "error"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.uuid "active_job_id"
t.text "concurrency_key"
t.text "cron_key"
t.uuid "retried_good_job_id"
t.datetime "cron_at"
t.uuid "batch_id"
t.uuid "batch_callback_id"
t.boolean "is_discrete"
t.integer "executions_count"
t.text "job_class"
t.integer "error_event", limit: 2
t.text "labels", array: true
t.uuid "locked_by_id"
t.datetime "locked_at"
t.index ["active_job_id", "created_at"], name: "index_good_jobs_on_active_job_id_and_created_at"
t.index ["batch_callback_id"], name: "index_good_jobs_on_batch_callback_id", where: "(batch_callback_id IS NOT NULL)"
t.index ["batch_id"], name: "index_good_jobs_on_batch_id", where: "(batch_id IS NOT NULL)"
t.index ["concurrency_key"], name: "index_good_jobs_on_concurrency_key_when_unfinished", where: "(finished_at IS NULL)"
t.index ["cron_key", "created_at"], name: "index_good_jobs_on_cron_key_and_created_at_cond", where: "(cron_key IS NOT NULL)"
t.index ["cron_key", "cron_at"], name: "index_good_jobs_on_cron_key_and_cron_at_cond", unique: true, where: "(cron_key IS NOT NULL)"
t.index ["finished_at"], name: "index_good_jobs_jobs_on_finished_at", where: "((retried_good_job_id IS NULL) AND (finished_at IS NOT NULL))"
t.index ["labels"], name: "index_good_jobs_on_labels", where: "(labels IS NOT NULL)", using: :gin
t.index ["locked_by_id"], name: "index_good_jobs_on_locked_by_id", where: "(locked_by_id IS NOT NULL)"
t.index ["priority", "created_at"], name: "index_good_job_jobs_for_candidate_lookup", where: "(finished_at IS NULL)"
t.index ["priority", "created_at"], name: "index_good_jobs_jobs_on_priority_created_at_when_unfinished", order: { priority: "DESC NULLS LAST" }, where: "(finished_at IS NULL)"
t.index ["priority", "scheduled_at"], name: "index_good_jobs_on_priority_scheduled_at_unfinished_unlocked", where: "((finished_at IS NULL) AND (locked_by_id IS NULL))"
t.index ["queue_name", "scheduled_at"], name: "index_good_jobs_on_queue_name_and_scheduled_at", where: "(finished_at IS NULL)"
t.index ["scheduled_at"], name: "index_good_jobs_on_scheduled_at", where: "(finished_at IS NULL)"
end

create_table "health_questions", force: :cascade do |t|
t.string "title", null: false
t.bigint "vaccine_id", null: false
Expand Down
2 changes: 1 addition & 1 deletion docs/terraform.md
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,6 @@ tf apply -var-file=env/$env.tfvars -var="image_digest=<image_digest_from_ECR>"
```

Step 3: Run Codedeploy from the AWS Console
Step 4: If needed, trigger a deployment for the good-job service from the AWS ECS Console
Step 4: If needed, trigger a deployment for the sidekiq service from the AWS ECS Console

For a more high-level description of the process see [deployment-process.md](../terraform/documentation/deployment-process.md)
26 changes: 0 additions & 26 deletions terraform/app/ecs.tf
Original file line number Diff line number Diff line change
Expand Up @@ -58,32 +58,6 @@ module "web_service" {
deployment_controller = "CODE_DEPLOY"
}

module "good_job_service" {
source = "./modules/ecs_service"
task_config = {
environment = local.task_envs
secrets = local.task_secrets
cpu = 1024
memory = 2048
docker_image = "${var.account_id}.dkr.ecr.eu-west-2.amazonaws.com/${var.docker_image}@${var.image_digest}"
execution_role_arn = aws_iam_role.ecs_task_execution_role.arn
task_role_arn = aws_iam_role.ecs_task_role.arn
log_group_name = aws_cloudwatch_log_group.ecs_log_group.name
region = var.region
health_check_command = ["CMD-SHELL", "./bin/internal_healthcheck http://localhost:4000/status/connected"]
}
network_params = {
subnets = [aws_subnet.private_subnet_a.id, aws_subnet.private_subnet_b.id]
vpc_id = aws_vpc.application_vpc.id
}
minimum_replica_count = var.good_job_replicas
maximum_replica_count = var.good_job_replicas
cluster_id = aws_ecs_cluster.cluster.id
cluster_name = aws_ecs_cluster.cluster.name
environment = var.environment
server_type = "good-job"
}

module "sidekiq_service" {
source = "./modules/ecs_service"
task_config = {
Expand Down
1 change: 0 additions & 1 deletion terraform/app/env/sandbox-alpha.tfvars
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ minimum_web_replicas = 1
maximum_web_replicas = 2
minimum_sidekiq_replicas = 1
maximum_sidekiq_replicas = 2
good_job_replicas = 1

valkey_node_type = "cache.t4g.micro"
valkey_log_retention_days = 3
Expand Down
1 change: 0 additions & 1 deletion terraform/app/env/sandbox-beta.tfvars
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ minimum_web_replicas = 1
maximum_web_replicas = 2
minimum_sidekiq_replicas = 1
maximum_sidekiq_replicas = 2
good_job_replicas = 1

# Valkey serverless configuration - minimal settings for sandbox
valkey_node_type = "cache.t4g.micro"
Expand Down
Loading