bug-1908543: drop support for AWS
relud committed Jul 19, 2024
1 parent 3788089 commit a6159f3
Showing 57 changed files with 233 additions and 2,928 deletions.
15 changes: 5 additions & 10 deletions Makefile
@@ -51,7 +51,7 @@ my.env:
build: my.env ## | Build docker images.
${DC} build ${DOCKER_BUILD_OPTS} --build-arg userid=${SOCORRO_UID} --build-arg groupid=${SOCORRO_GID} --progress plain app
${DC} build --progress plain oidcprovider fakesentry gcs-emulator
-${DC} build --progress plain statsd postgresql memcached localstack elasticsearch symbolsserver
+${DC} build --progress plain statsd postgresql memcached elasticsearch symbolsserver
touch .docker-build

.PHONY: devcontainerbuild
@@ -64,7 +64,7 @@ devcontainer: my.env .devcontainer-build ## | Run VS Code development container
${DC} up --detach devcontainer

.PHONY: setup
-setup: my.env .docker-build ## | Set up Postgres, Elasticsearch, local SQS, and local S3 services.
+setup: my.env .docker-build ## | Set up Postgres, Elasticsearch, local Pub/Sub, and local GCS services.
${DC} run --rm app shell /app/bin/setup_services.sh

.PHONY: updatedata
@@ -81,11 +81,10 @@ run: my.env ## | Run processor, webapp, fakesentry, symbolsserver, and required
processor webapp fakesentry symbolsserver

.PHONY: runservices
-runservices: my.env ## | Run service containers (Postgres, SQS, etc)
+runservices: my.env ## | Run service containers (Postgres, Pub/Sub, etc)
${DC} up -d --remove-orphans \
elasticsearch \
gcs-emulator \
-localstack \
memcached \
postgresql \
pubsub \
@@ -133,18 +132,14 @@ psql: my.env .docker-build ## | Open psql cli.

.PHONY: test
test: my.env .docker-build ## | Run unit tests.
-# Make sure services are started and start localstack before the others to
-# give it a little more time to wake up
-${DC} up -d localstack
+# Make sure services are started
${DC} up -d elasticsearch postgresql statsd
# Run tests
${DC} run --rm test shell ./bin/test.sh

.PHONY: test-ci
test-ci: my.env .docker-build ## | Run unit tests in CI.
-# Make sure services are started and start localstack before the others to
-# give it a little more time to wake up
-${DC} up -d localstack
+# Make sure services are started
${DC} up -d elasticsearch postgresql statsd
# Run tests in test-ci which doesn't volume mount local directory
${DC} run --rm test-ci shell ./bin/test.sh
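With localstack dropped, the only queueing backend in the dev environment is the Pub/Sub emulator that `runservices` starts. As a minimal sketch, publishing to that emulator from Python looks roughly like this — the emulator address, project id, and topic name are illustrative assumptions, not values from this commit:

    # Sketch: publish one message to the local Pub/Sub emulator.
    # google-cloud-pubsub honors PUBSUB_EMULATOR_HOST automatically;
    # "pubsub:5010", "local-dev", and "standard-topic" are assumed values.
    import os

    from google.cloud import pubsub_v1

    os.environ.setdefault("PUBSUB_EMULATOR_HOST", "pubsub:5010")

    publisher = pubsub_v1.PublisherClient()
    topic_path = publisher.topic_path("local-dev", "standard-topic")
    future = publisher.publish(topic_path, b"<crash id>")
    print(future.result())  # message id assigned by the emulator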
11 changes: 5 additions & 6 deletions bin/load_processed_crashes_into_es.py
@@ -4,12 +4,12 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

-# Loads a processed crash by either crash ID or date from either
-# GCS or S3 into Elasticsearch, depending on `settings.CRASH_SOURCE`,
+# Loads a processed crash by either crash ID or date from crash
+# storage into Elasticsearch, depending on `settings.CRASH_SOURCE`,
# optionally skipping crashes already in Elasticsearch.

-# Uses a variation of `check_crash_ids_for_date`
-# from the `verifyprocessed` command in Crash Stats to get crash IDs from S3/GCS:
+# Uses a variation of `check_crash_ids_for_date` from the `verifyprocessed`
+# command in Crash Stats to get crash IDs from crash storage:
# /~https://github.com/mozilla-services/socorro/blob/3f39c6aaa7f294884f3261fd268e8084d5eec93a/webapp/crashstats/crashstats/management/commands/verifyprocessed.py#L77-L115

# Usage: ./bin/load_processed_crash_into_es.py [OPTIONS] [CRASH_ID | DATE]
@@ -170,8 +170,7 @@ def save_crash_to_es(crash_id):
@click.pass_context
def load_crashes(ctx, date, crash_id, num_workers, only_missing_in_es):
"""
-Loads processed crashes into Elasticsearch by crash source (S3 or GCS)
-and either crash ID or date.
+Loads processed crashes into Elasticsearch by either crash ID or date.
Must specify either CRASH_ID or DATE.
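Since the script now reads exclusively from GCS-backed crash storage, enumerating processed crashes against the local GCS emulator can be sketched as below — the emulator host, bucket name, and the `v1/processed_crash/` key prefix are assumptions for illustration, not taken from this commit:

    # Sketch: list processed-crash ids in an emulated GCS bucket.
    # google-cloud-storage honors STORAGE_EMULATOR_HOST; host, bucket,
    # and key prefix here are assumed.
    import os

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import storage

    os.environ.setdefault("STORAGE_EMULATOR_HOST", "http://gcs-emulator:8001")

    client = storage.Client(project="local-dev", credentials=AnonymousCredentials())
    for blob in client.list_blobs("crashstorage", prefix="v1/processed_crash/"):
        print(blob.name.rsplit("/", 1)[-1])  # trailing path segment is the crash id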
21 changes: 4 additions & 17 deletions bin/process_crashes.sh
@@ -20,7 +20,6 @@
set -euo pipefail

DATADIR=./crashdata_tryit_tmp
-CLOUD_PROVIDER="${CLOUD_PROVIDER:-AWS}"

function cleanup {
# Cleans up files generated by the script
@@ -47,24 +46,12 @@ mkdir "${DATADIR}" || echo "${DATADIR} already exists."
./socorro-cmd fetch_crash_data "${DATADIR}" $@

# Make the bucket and sync contents
-# ^^ returns CLOUD_PROVIDER value as uppercase
-if [[ "${CLOUD_PROVIDER^^}" == "GCP" ]]; then
-  ./socorro-cmd gcs create "${CRASHSTORAGE_GCS_BUCKET}"
-  ./socorro-cmd gcs upload "${DATADIR}" "${CRASHSTORAGE_GCS_BUCKET}"
-  ./socorro-cmd gcs list_objects "${CRASHSTORAGE_GCS_BUCKET}"
-else
-  ./bin/socorro_aws_s3.sh mb "s3://${CRASHSTORAGE_S3_BUCKET}/"
-  ./bin/socorro_aws_s3.sh cp --recursive "${DATADIR}" "s3://${CRASHSTORAGE_S3_BUCKET}/"
-  ./bin/socorro_aws_s3.sh ls --recursive "s3://${CRASHSTORAGE_S3_BUCKET}/"
-fi
+./socorro-cmd gcs create "${CRASHSTORAGE_GCS_BUCKET}"
+./socorro-cmd gcs upload "${DATADIR}" "${CRASHSTORAGE_GCS_BUCKET}"
+./socorro-cmd gcs list_objects "${CRASHSTORAGE_GCS_BUCKET}"

# Add crash ids to queue
-# ^^ returns CLOUD_PROVIDER value as uppercase
-if [[ "${CLOUD_PROVIDER^^}" == "GCP" ]]; then
-  ./socorro-cmd pubsub publish "${PUBSUB_PROJECT_ID}" "${PUBSUB_STANDARD_TOPIC_NAME}" $@
-else
-  ./socorro-cmd sqs publish "${SQS_STANDARD_QUEUE}" $@
-fi
+./socorro-cmd pubsub publish "${PUBSUB_PROJECT_ID}" "${PUBSUB_STANDARD_TOPIC_NAME}" $@

# Print urls to make it easier to look at them
for crashid in "$@"
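With the `CLOUD_PROVIDER` branch gone, the upload step is always `socorro-cmd gcs upload`. A rough Python equivalent of mirroring a local directory tree into a GCS bucket, keeping paths relative to the data directory as object keys (emulator host and bucket name are assumptions; this is not the command's actual implementation):

    # Sketch: mirror a local directory into a GCS bucket.
    import os
    from pathlib import Path

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import storage

    os.environ.setdefault("STORAGE_EMULATOR_HOST", "http://gcs-emulator:8001")
    client = storage.Client(project="local-dev", credentials=AnonymousCredentials())
    bucket = client.bucket("crashstorage")

    datadir = Path("./crashdata_tryit_tmp")
    for path in datadir.rglob("*"):
        if path.is_file():
            # object key mirrors the file's path relative to DATADIR
            bucket.blob(str(path.relative_to(datadir))).upload_from_filename(str(path))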
4 changes: 2 additions & 2 deletions bin/pubsub_cli.py
@@ -173,7 +173,7 @@ def pull(ctx, project_id, subscription_name, ack, max_messages):
@pubsub_group.command("create-all")
@click.pass_context
def create_all(ctx):
"""Create SQS queues related to processing."""
"""Create Pub/Sub queues related to processing."""
options = settings.QUEUE_PUBSUB["options"]
project_id = options["project_id"]
queues = {
@@ -194,7 +194,7 @@ def create_all(ctx):
@pubsub_group.command("delete-all")
@click.pass_context
def delete_all(ctx):
"""Delete SQS queues related to processing."""
"""Delete Pub/Sub queues related to processing."""
options = settings.QUEUE_PUBSUB["options"]
project_id = options["project_id"]
for topic_name in (
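The docstring fix matters because Pub/Sub has no first-class queues: each "queue" here is a topic paired with a subscription, so `create-all` and `delete-all` must provision both halves. A minimal sketch of that pairing (project, topic, and subscription names are illustrative, not the CLI's actual code):

    # Sketch: a Pub/Sub "queue" is a topic plus a pull subscription.
    # Assumes PUBSUB_EMULATOR_HOST is set when run against the emulator.
    from google.cloud import pubsub_v1

    publisher = pubsub_v1.PublisherClient()
    subscriber = pubsub_v1.SubscriberClient()
    project_id = "local-dev"  # assumed

    for topic, sub in [("standard", "standard-sub"), ("reprocessing", "reprocessing-sub")]:
        topic_path = publisher.topic_path(project_id, topic)
        publisher.create_topic(request={"name": topic_path})
        sub_path = subscriber.subscription_path(project_id, sub)
        subscriber.create_subscription(request={"name": sub_path, "topic": topic_path})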
23 changes: 0 additions & 23 deletions bin/recreate_s3_buckets.sh

This file was deleted.

9 changes: 1 addition & 8 deletions bin/setup_services.sh
@@ -6,7 +6,7 @@

# Usage: bin/setup_services.sh
#
-# Deletes all service state data and rebuilds database tables, S3 buckets,
+# Deletes all service state data and rebuilds database tables, buckets,
# and other service state.
#
# Note: This should be called from inside a container.
@@ -23,17 +23,10 @@ set -euo pipefail
/app/socorro-cmd gcs delete "${TELEMETRY_GCS_BUCKET}"
/app/socorro-cmd gcs create "${TELEMETRY_GCS_BUCKET}"

-# Delete and create local S3 buckets
-/app/bin/recreate_s3_buckets.sh

# Delete and create Elasticsearch indices
/app/socorro-cmd es delete
/app/socorro-cmd es create

-# Delete and create SQS queues
-/app/socorro-cmd sqs delete-all
-/app/socorro-cmd sqs create-all

# Delete and create Pub/Sub queues
/app/socorro-cmd pubsub delete-all
/app/socorro-cmd pubsub create-all
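After this change, setup_services.sh resets only GCS, Elasticsearch, and Pub/Sub state. The bucket reset that `socorro-cmd gcs delete`/`create` performs amounts to a delete-then-create cycle; a hedged sketch against the emulator (bucket names and host are assumptions, and this is not socorro-cmd's code):

    # Sketch: reset buckets to a clean, empty state in the GCS emulator.
    import os

    from google.api_core.exceptions import NotFound
    from google.auth.credentials import AnonymousCredentials
    from google.cloud import storage

    os.environ.setdefault("STORAGE_EMULATOR_HOST", "http://gcs-emulator:8001")
    client = storage.Client(project="local-dev", credentials=AnonymousCredentials())

    for name in ("crashstorage", "telemetry"):
        try:
            client.get_bucket(name).delete(force=True)  # removes contents too
        except NotFound:
            pass
        client.create_bucket(name)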
53 changes: 0 additions & 53 deletions bin/socorro_aws_s3.sh

This file was deleted.

(Diffs for the remaining 50 changed files are not shown here.)
