Remove migration-tool Django app and UI page (#708)

* remove migration_tool app (#687)
* remove migration-tool page (#699)
This commit is contained in:
Joey Orlando 2022-10-26 15:47:08 +02:00 committed by GitHub
parent 8c1f1499cf
commit 94934bf3e4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 0 additions and 1324 deletions

View file

@ -1,7 +0,0 @@
# amixr api url
# Base URL of the legacy Amixr public API that the migration tool pulls data from.
REQUEST_URL = "https://amixr.io/api/v1"
# migration status
# Values reported by the migration-status endpoint to the frontend.
NOT_STARTED = "not_started"
IN_PROGRESS = "in_progress"
FINISHED = "finished"

View file

@ -1,33 +0,0 @@
# Generated by Django 3.2.5 on 2022-05-31 14:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the migration_tool app.

    Creates:
      - AmixrMigrationTaskStatus: one row per scheduled Celery migration task,
        used to track whether the overall migration has finished.
      - LockedAlert: one-to-one marker on alerts created by the migrator.
    """

    initial = True

    dependencies = [
        ('alerts', '0001_squashed_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AmixrMigrationTaskStatus',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('task_id', models.CharField(db_index=True, max_length=500)),
                ('name', models.CharField(max_length=500)),
                ('started_at', models.DateTimeField(auto_now_add=True)),
                ('is_finished', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='LockedAlert',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('alert', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='migrator_lock', to='alerts.alert')),
            ],
        ),
    ]

View file

@ -1,22 +0,0 @@
# Generated by Django 3.2.5 on 2022-05-31 14:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the organization FK to AmixrMigrationTaskStatus.

    Split from 0001 because the FK targets user_management.organization,
    which lives in a different app.
    """

    initial = True

    dependencies = [
        ('user_management', '0001_squashed_initial'),
        ('migration_tool', '0001_squashed_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='amixrmigrationtaskstatus',
            name='organization',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='migration_tasks', to='user_management.organization'),
        ),
    ]

View file

@ -1,2 +0,0 @@
from .amixr_migration_task_status import AmixrMigrationTaskStatus # noqa: F401
from .locked_alert import LockedAlert # noqa: F401

View file

@ -1,27 +0,0 @@
from celery import uuid as celery_uuid
from django.db import models
class AmixrMigrationTaskStatusQuerySet(models.QuerySet):
    """Queryset/manager helpers for AmixrMigrationTaskStatus."""

    def get_migration_task_id(self, organization_id, name):
        """Generate a fresh Celery task id, persist a status row for it, and return the id.

        The status row lets the migration-status endpoint tell whether any
        scheduled migration task is still unfinished.
        """
        task_id = celery_uuid()
        status_row = self.model(organization_id=organization_id, name=name, task_id=task_id)
        status_row.save()
        return task_id
class AmixrMigrationTaskStatus(models.Model):
    """Tracks one scheduled Celery migration task for an organization.

    A row is created (via the custom queryset) every time a migration task is
    scheduled, and flipped to finished when the task completes; the migration
    is considered done when no unfinished rows remain.
    """

    objects = AmixrMigrationTaskStatusQuerySet.as_manager()

    # Celery task id the row tracks (indexed: looked up at the end of every task).
    task_id = models.CharField(max_length=500, db_index=True)
    # Name of the Celery task (e.g. "migrate_schedules").
    name = models.CharField(max_length=500)
    organization = models.ForeignKey(
        to="user_management.Organization",
        related_name="migration_tasks",
        on_delete=models.deletion.CASCADE,
    )
    started_at = models.DateTimeField(auto_now_add=True)
    is_finished = models.BooleanField(default=False)

    def update_status_to_finished(self):
        """Mark the tracked task as finished (persists only the flag)."""
        self.is_finished = True
        self.save(update_fields=["is_finished"])

View file

@ -1,5 +0,0 @@
from django.db import models
class LockedAlert(models.Model):
    """One-to-one marker on an Alert created by the migrator (a "migrator lock")."""

    alert = models.OneToOneField("alerts.Alert", on_delete=models.CASCADE, related_name="migrator_lock")

View file

@ -1,612 +0,0 @@
import logging
from celery.utils.log import get_task_logger
from django.apps import apps
from django.conf import settings
from django.db import transaction
from django.utils import timezone
from rest_framework import exceptions
from apps.alerts.models import Alert, AlertGroup, AlertReceiveChannel, ResolutionNote
from apps.migration_tool.models import AmixrMigrationTaskStatus, LockedAlert
from apps.migration_tool.utils import convert_string_to_datetime, get_data_with_respect_to_pagination
from apps.public_api.serializers import PersonalNotificationRuleSerializer
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
logger = get_task_logger(__name__)
logger.setLevel(logging.DEBUG)
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_from_old_amixr(api_token, organization_id, user_id):
    """Entry point of the Amixr -> OnCall migration.

    Matches legacy users to OnCall users by email, then fans out the schedule
    migration (which chains into integrations/routes/etc.) and the per-user
    data migration as separate tracked Celery tasks.
    """
    logger.info(f"Start migration task from amixr for organization {organization_id}")

    # Map old Amixr user public ids to matching OnCall users (matched by email).
    users = get_users(organization_id, api_token)

    # Every scheduled task gets a status row so progress can be reported.
    schedules_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=migrate_schedules.name
    )
    migrate_schedules.apply_async(
        (api_token, organization_id, user_id, users),
        task_id=schedules_task_id,
        countdown=5,
    )

    user_data_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=start_migration_user_data.name
    )
    start_migration_user_data.apply_async(
        (api_token, organization_id, users),
        task_id=user_data_task_id,
    )
    logger.info(f"Start 'start_migration_from_old_amixr' task for organization {organization_id}")
def get_users(organization_id, api_token):
    """Return a mapping of old Amixr user public id -> info about the matching OnCall user.

    Users are matched by email; legacy users with no matching OnCall user are
    dropped. Fix over the original: each value is now a fresh dict. The old
    implementation assigned shared dict objects out of an email-keyed lookup
    table and then mutated them, so two legacy users with the same email would
    alias one dict and silently overwrite each other's phone number / old pk.
    """
    Organization = apps.get_model("user_management", "Organization")
    organization = Organization.objects.get(pk=organization_id)

    # get all users from old amixr
    old_users = get_data_with_respect_to_pagination(api_token, "users")
    old_users_emails = [old_user["email"] for old_user in old_users]

    # find users in Grafana OnCall by email
    oncall_id_by_email = {
        row["email"]: row["id"]
        for row in organization.users.filter(email__in=old_users_emails).values("email", "id")
    }

    users = {}
    for old_user in old_users:
        email = old_user["email"]
        if email not in oncall_id_by_email:
            continue
        users[old_user["id"]] = {
            "id": oncall_id_by_email[email],
            "old_verified_phone_number": old_user.get("verified_phone_number"),
            "old_public_primary_key": old_user["id"],
        }
    # Example result:
    # users = {
    #     "OLD_PUBLIC_PK": {
    #         "id": 1, # user pk in OnCall db
    #         "old_verified_phone_number": "1234",
    #         "old_public_primary_key": "OLD_PUBLIC_PK",
    #     },
    #     ...
    # }
    return users
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_schedules(api_token, organization_id, user_id, users):
    """Copy iCal-backed schedules from legacy Amixr, then chain into integration migration.

    Only schedules that have an ical_url are migrated; schedules whose name
    already exists in the organization are skipped (idempotency on retry).
    """
    logger.info(f"Started migration schedules for organization {organization_id}")
    OnCallScheduleICal = apps.get_model("schedules", "OnCallScheduleICal")
    Organization = apps.get_model("user_management", "Organization")
    organization = Organization.objects.get(pk=organization_id)
    schedules = get_data_with_respect_to_pagination(api_token, "schedules")
    existing_schedules_names = set(organization.oncall_schedules.values_list("name", flat=True))
    created_schedules = {}
    for schedule in schedules:
        # Skip non-iCal schedules and ones already present under the same name.
        if not schedule["ical_url"] or schedule["name"] in existing_schedules_names:
            continue
        new_schedule = OnCallScheduleICal(
            organization=organization,
            name=schedule["name"],
            ical_url_primary=schedule["ical_url"],
            team_id=None,
        )
        new_schedule.save()
        created_schedules[schedule["id"]] = {
            "id": new_schedule.pk,
        }
    # Example result:
    # created_schedules = {
    #     "OLD_PUBLIC_PK": {
    #         "id": 1, # schedule pk in OnCall db
    #     },
    #     ...
    # }
    # Chain: integrations need the old-pk -> new-pk schedule map built above.
    migrate_integrations_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=migrate_integrations.name
    )
    migrate_integrations.apply_async(
        (api_token, organization_id, user_id, created_schedules, users), task_id=migrate_integrations_task_id
    )
    # Flip this task's own status row to finished.
    current_task_id = migrate_schedules.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration schedules for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_integrations(api_token, organization_id, user_id, created_schedules, users):
    """Recreate legacy Amixr integrations in OnCall and fan out route migration per integration.

    Integrations whose verbal name already exists are skipped (idempotency);
    so are integrations whose legacy type cannot be mapped to an OnCall
    web-installable integration type.
    """
    logger.info(f"Started migration integrations for organization {organization_id}")
    Organization = apps.get_model("user_management", "Organization")
    organization = Organization.objects.get(pk=organization_id)
    integrations = get_data_with_respect_to_pagination(api_token, "integrations")
    existing_integrations_names = set(organization.alert_receive_channels.values_list("verbal_name", flat=True))
    for integration in integrations:
        if integration["name"] in existing_integrations_names:
            continue
        try:
            # Reverse-map the legacy type string to an OnCall integration type key.
            integration_type = [
                key
                for key, value in AlertReceiveChannel.INTEGRATIONS_TO_REVERSE_URL_MAP.items()
                if value == integration["type"]
            ][0]
        except IndexError:
            # Unknown legacy type - nothing to map it to.
            continue
        if integration_type not in AlertReceiveChannel.WEB_INTEGRATION_CHOICES:
            continue
        # Copy all notification templates across as-is.
        new_integration = AlertReceiveChannel.create(
            organization=organization,
            verbal_name=integration["name"],
            integration=integration_type,
            author_id=user_id,
            slack_title_template=integration["templates"]["slack"]["title"],
            slack_message_template=integration["templates"]["slack"]["message"],
            slack_image_url_template=integration["templates"]["slack"]["image_url"],
            sms_title_template=integration["templates"]["sms"]["title"],
            phone_call_title_template=integration["templates"]["phone_call"]["title"],
            web_title_template=integration["templates"]["web"]["title"],
            web_message_template=integration["templates"]["web"]["message"],
            web_image_url_template=integration["templates"]["web"]["image_url"],
            email_title_template=integration["templates"]["email"]["title"],
            email_message_template=integration["templates"]["email"]["message"],
            telegram_title_template=integration["templates"]["telegram"]["title"],
            telegram_message_template=integration["templates"]["telegram"]["message"],
            telegram_image_url_template=integration["templates"]["telegram"]["image_url"],
            grouping_id_template=integration["templates"]["grouping_key"],
            resolve_condition_template=integration["templates"]["resolve_signal"],
            acknowledge_condition_template=integration["templates"]["acknowledge_signal"],
        )
        # collect integration data in a dict
        integration_data = {
            "id": new_integration.pk,
            "verbal_name": new_integration.verbal_name,
            "old_public_primary_key": integration["id"],
        }
        # One routes-migration task per newly created integration.
        migrate_routes_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
            organization_id=organization_id, name=migrate_routes.name
        )
        migrate_routes.apply_async(
            (api_token, organization_id, users, created_schedules, integration_data),
            task_id=migrate_routes_task_id,
            countdown=3,
        )
    current_task_id = migrate_integrations.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration integrations for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_routes(api_token, organization_id, users, created_schedules, integration_data):
    """Recreate one integration's legacy routes as OnCall channel filters.

    For every migrated route an escalation chain is looked up or created, and
    two follow-up tasks are fanned out per route: escalation-policy migration
    and alert-group migration.
    """
    logger.info(f"Start migration routes for organization {organization_id}")
    AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
    ChannelFilter = apps.get_model("alerts", "ChannelFilter")
    Organization = apps.get_model("user_management", "Organization")
    organization = Organization.objects.get(pk=organization_id)
    integration = AlertReceiveChannel.objects.filter(pk=integration_data["id"]).first()
    if integration:
        url = "routes?integration_id={}".format(integration_data["old_public_primary_key"])
        routes = get_data_with_respect_to_pagination(api_token, url)
        # AlertReceiveChannel.create gives every integration a default route.
        default_route = integration.channel_filters.get(is_default=True)
        existing_chain_names = set(organization.escalation_chains.values_list("name", flat=True))
        existing_route_filtering_term = set(integration.channel_filters.values_list("filtering_term", flat=True))
        for route in routes:
            # The legacy "last route" maps onto OnCall's default channel filter.
            is_default_route = route["is_the_last_route"]
            filtering_term = route["routing_regex"]
            if is_default_route:
                escalation_chain_name = f"{integration_data['verbal_name'][:90]} - default"
            else:
                # Skip routes whose regex already exists on this integration (idempotency).
                if filtering_term in existing_route_filtering_term:
                    continue
                escalation_chain_name = f"{integration_data['verbal_name']} - {filtering_term}"[:100]
            if escalation_chain_name in existing_chain_names:
                escalation_chain = organization.escalation_chains.get(name=escalation_chain_name)
            else:
                escalation_chain = organization.escalation_chains.create(name=escalation_chain_name)
            if is_default_route:
                # Reuse the auto-created default route; just attach the chain.
                new_route = default_route
                new_route.escalation_chain = escalation_chain
                new_route.save(update_fields=["escalation_chain"])
            else:
                new_route = ChannelFilter(
                    alert_receive_channel_id=integration_data["id"],
                    escalation_chain_id=escalation_chain.pk,
                    filtering_term=filtering_term,
                    order=route["position"],
                )
                new_route.save()
            route_data = {
                "id": new_route.pk,
                "old_public_primary_key": route["id"],
                "escalation_chain": {
                    "id": escalation_chain.pk,
                },
            }
            migrate_escalation_policies_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=migrate_escalation_policies.name
            )
            migrate_escalation_policies.apply_async(
                (api_token, organization_id, users, created_schedules, route_data),
                task_id=migrate_escalation_policies_task_id,
                countdown=2,
            )
            start_migration_alert_groups_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=start_migration_alert_groups.name
            )
            start_migration_alert_groups.apply_async(
                (api_token, organization_id, users, integration_data, route_data),
                task_id=start_migration_alert_groups_task_id,
                countdown=10,
            )
    current_task_id = migrate_routes.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration routes for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_escalation_policies(api_token, organization_id, users, created_schedules, route_data):
    """Recreate the legacy escalation policies of one route on its escalation chain.

    Runs only when the chain exists and has no policies yet (idempotency).
    Unknown legacy step types are skipped; "important" variants are remapped.
    """
    logger.info(f"Start migration escalation policies for organization {organization_id}")
    EscalationChain = apps.get_model("alerts", "EscalationChain")
    EscalationPolicy = apps.get_model("alerts", "EscalationPolicy")
    escalation_chain = EscalationChain.objects.filter(pk=route_data["escalation_chain"]["id"]).first()
    if escalation_chain and not escalation_chain.escalation_policies.exists():
        url = "escalation_policies?route_id={}".format(route_data["old_public_primary_key"])
        escalation_policies = get_data_with_respect_to_pagination(api_token, url)
        for escalation_policy in escalation_policies:
            try:
                # Reverse-map the public step name to the internal step key.
                step_type = [
                    key
                    for key, value in EscalationPolicy.PUBLIC_STEP_CHOICES_MAP.items()
                    if value == escalation_policy["type"] and key in EscalationPolicy.PUBLIC_STEP_CHOICES
                ][0]
            except IndexError:
                continue
            # Legacy carries importance as a flag; OnCall uses separate step types.
            if step_type in EscalationPolicy.DEFAULT_TO_IMPORTANT_STEP_MAPPING and escalation_policy.get("important"):
                step_type = EscalationPolicy.DEFAULT_TO_IMPORTANT_STEP_MAPPING[step_type]
            notify_to_users_queue = []
            if step_type == EscalationPolicy.STEP_NOTIFY_USERS_QUEUE:
                # Keep only users that were matched to OnCall accounts.
                notify_to_users_queue = [
                    users[user_old_public_pk]["id"]
                    for user_old_public_pk in escalation_policy.get("persons_to_notify_next_each_time", [])
                    if user_old_public_pk in users
                ]
            elif step_type in [
                EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS,
                EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS_IMPORTANT,
            ]:
                notify_to_users_queue = [
                    users[user_old_public_pk]["id"]
                    for user_old_public_pk in escalation_policy.get("persons_to_notify", [])
                    if user_old_public_pk in users
                ]
            if step_type == EscalationPolicy.STEP_NOTIFY_IF_TIME:
                # Legacy sends times like "09:00:00Z".
                notify_from_time = timezone.datetime.strptime(
                    escalation_policy.get("notify_if_time_from"), "%H:%M:%SZ"
                ).time()
                notify_to_time = timezone.datetime.strptime(
                    escalation_policy.get("notify_if_time_to"), "%H:%M:%SZ"
                ).time()
            else:
                notify_from_time, notify_to_time = None, None
            duration = escalation_policy.get("duration")
            wait_delay = timezone.timedelta(seconds=duration) if duration else None
            # Remap legacy schedule reference onto the schedule created earlier, if any.
            schedule_id = escalation_policy.get("notify_on_call_from_schedule")
            notify_schedule_id = created_schedules.get(schedule_id, {}).get("id") if schedule_id else None
            new_escalation_policy = EscalationPolicy(
                step=step_type,
                order=escalation_policy["position"],
                escalation_chain=escalation_chain,
                notify_schedule_id=notify_schedule_id,
                wait_delay=wait_delay,
                from_time=notify_from_time,
                to_time=notify_to_time,
            )
            new_escalation_policy.save()
            if notify_to_users_queue:
                # M2M can only be set after the policy row exists.
                new_escalation_policy.notify_to_users_queue.set(notify_to_users_queue)
    current_task_id = migrate_escalation_policies.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration escalation policies for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_alert_groups(api_token, organization_id, users, integration_data, route_data):
    """Fan out one migrate_alert_group task per legacy incident on this route.

    Skips the whole route if its new channel filter already has alert groups
    (idempotency on retry).
    """
    logger.info(f"Start migration alert groups for organization {organization_id}")
    ChannelFilter = apps.get_model("alerts", "ChannelFilter")
    url = "incidents?route_id={}".format(route_data["old_public_primary_key"])
    alert_groups = get_data_with_respect_to_pagination(api_token, url)
    route = ChannelFilter.objects.filter(pk=route_data["id"]).first()
    if route and not route.alert_groups.exists():
        for alert_group in alert_groups:
            migrate_alert_group_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=migrate_alert_group.name
            )
            migrate_alert_group.apply_async(
                (api_token, organization_id, users, integration_data, route_data, alert_group),
                task_id=migrate_alert_group_task_id,
            )
    current_task_id = start_migration_alert_groups.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished 'start_migration_alert_groups' for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_alert_group(api_token, organization_id, users, integration_data, route_data, alert_group_to_migrate):
    """Recreate one legacy incident as a resolved OnCall alert group, then fan out its alerts and logs."""
    logger.info(f"Start migration alert_group {alert_group_to_migrate['id']} for organization {organization_id}")
    integration = AlertReceiveChannel.objects.get(pk=integration_data["id"])
    # Remap resolver/acknowledger to OnCall user pks where a match exists.
    resolve_by_user_id = None
    acknowledged_by_user_id = None
    if alert_group_to_migrate["resolved_by_user"]:
        resolve_by_user_id = users.get(alert_group_to_migrate["resolved_by_user"], {}).get("id")
    if alert_group_to_migrate["acknowledged_by_user"]:
        acknowledged_by_user_id = users.get(alert_group_to_migrate["acknowledged_by_user"], {}).get("id")
    # Migrated groups are always created as resolved (resolved=True hard-coded),
    # falling back to "now" when the legacy payload has no resolved_at.
    new_group = AlertGroup.all_objects.create(
        channel=integration,
        channel_filter_id=route_data["id"],
        resolved=True,
        resolved_by=alert_group_to_migrate["resolved_by"],
        resolved_by_user_id=resolve_by_user_id,
        resolved_at=alert_group_to_migrate.get("resolved_at") or timezone.now(),
        acknowledged=alert_group_to_migrate["acknowledged"],
        acknowledged_by=alert_group_to_migrate["acknowledged_by"],
        acknowledged_by_user_id=acknowledged_by_user_id,
        acknowledged_at=alert_group_to_migrate.get("acknowledged_at"),
    )
    # started_at is auto-set on create; overwrite it with the legacy timestamp.
    new_group.started_at = convert_string_to_datetime(alert_group_to_migrate["created_at"])
    new_group.save(update_fields=["started_at"])
    alert_group_data = {
        "id": new_group.pk,
        "old_public_primary_key": alert_group_to_migrate["id"],
    }
    start_migration_alerts_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=start_migration_alerts.name
    )
    start_migration_alerts.apply_async(
        (api_token, organization_id, alert_group_data),
        task_id=start_migration_alerts_task_id,
    )
    start_migration_logs_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=start_migration_logs.name
    )
    start_migration_logs.apply_async(
        (api_token, organization_id, users, alert_group_data),
        task_id=start_migration_logs_task_id,
        countdown=5,
    )
    current_task_id = migrate_alert_group.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration alert_group {alert_group_to_migrate['id']} for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_alerts(api_token, organization_id, alert_group_data):
    """Fan out one migrate_alert task per legacy alert of one alert group.

    Skips the group entirely if it already has alerts (idempotency on retry).
    """
    logger.info(
        f"Start migration alerts for alert_group {alert_group_data['old_public_primary_key']} "
        f"for organization {organization_id}"
    )
    AlertGroup = apps.get_model("alerts", "AlertGroup")
    alert_group = AlertGroup.all_objects.get(pk=alert_group_data["id"])
    if not alert_group.alerts.exists():
        url = "alerts?incident_id={}".format(alert_group_data["old_public_primary_key"])
        alerts = get_data_with_respect_to_pagination(api_token, url)
        for alert in alerts:
            migrate_alerts_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=migrate_alert.name
            )
            migrate_alert.apply_async(
                (organization_id, alert_group_data, alert),
                task_id=migrate_alerts_task_id,
            )
    current_task_id = start_migration_alerts.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(
        f"Finished 'start_migration_alerts' for alert_group {alert_group_data['old_public_primary_key']} "
        f"for organization {organization_id}"
    )
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_alert(organization_id, alert_group_data, alert):
    """Recreate one legacy alert inside its migrated alert group.

    The alert and its LockedAlert marker are created atomically; the marker
    ties the alert to the migrator (related_name "migrator_lock").
    """
    logger.info(f"Start migration alert {alert['id']} for organization {organization_id}")
    with transaction.atomic():
        new_alert = Alert(
            title=alert["title"],
            message=alert["message"],
            image_url=alert["image_url"],
            link_to_upstream_details=alert["link_to_upstream_details"],
            group_id=alert_group_data["id"],
            integration_unique_data=alert["payload"],
            raw_request_data=alert["payload"],
        )
        new_alert.save()
        LockedAlert.objects.create(alert=new_alert)
        # created_at is auto-set on save; overwrite it with the legacy timestamp.
        new_alert.created_at = convert_string_to_datetime(alert["created_at"])
        new_alert.save(update_fields=["created_at"])
    current_task_id = migrate_alert.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration alert {alert['id']} for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_logs(api_token, organization_id, users, alert_group_data):
    """Fan out one migrate_log task per legacy incident-log entry of one alert group."""
    logger.info(f"Start migration logs for alert_group {alert_group_data['id']} for organization {organization_id}")
    url = "incident_logs?incident_id={}".format(alert_group_data["old_public_primary_key"])
    alert_group_logs = get_data_with_respect_to_pagination(api_token, url)
    for log in alert_group_logs:
        migrate_logs_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
            organization_id=organization_id, name=migrate_log.name
        )
        migrate_log.apply_async(
            (organization_id, users, alert_group_data, log),
            task_id=migrate_logs_task_id,
        )
    current_task_id = start_migration_logs.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(
        f"Finished 'start_migration_logs' for alert_group {alert_group_data['id']} for organization {organization_id}"
    )
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_log(organization_id, users, alert_group_data, log):
    """Recreate one legacy incident-log entry as a ResolutionNote on the migrated alert group."""
    logger.info(f"Start migration log for alert_group {alert_group_data['id']} for organization {organization_id}")
    # Author may be None when the legacy user was not matched to an OnCall user.
    log_author_id = users.get(log["author"], {}).get("id")
    new_resolution_note = ResolutionNote(
        author_id=log_author_id,
        message_text=log["text"],
        alert_group_id=alert_group_data["id"],
    )
    new_resolution_note.save()
    # created_at is auto-set on save; overwrite it with the legacy timestamp.
    new_resolution_note.created_at = convert_string_to_datetime(log["created_at"])
    new_resolution_note.save(update_fields=["created_at"])
    current_task_id = migrate_log.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_user_data(api_token, organization_id, users):
    """Fan out one migrate_user_data task per matched user.

    `users` maps old Amixr user public ids to per-user info dicts; each
    info dict is forwarded to its own tracked Celery task.
    """
    logger.info(f"Start migration user data for organization {organization_id}")
    for user_data in users.values():
        task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
            organization_id=organization_id, name=migrate_user_data.name
        )
        migrate_user_data.apply_async(
            (api_token, organization_id, user_data),
            task_id=task_id,
        )
    # Flip this fan-out task's own status row to finished.
    AmixrMigrationTaskStatus.objects.get(
        task_id=start_migration_user_data.request.id
    ).update_status_to_finished()
    logger.info(f"Finished 'start_migration_user_data' task for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_user_data(api_token, organization_id, user_to_migrate):
    """Migrate one user's verified phone number and personal notification rules.

    New notification policies are bulk-created from the legacy rules, and the
    user's pre-existing policies are deleted afterwards (full replacement).
    """
    logger.info(f"Start migration user {user_to_migrate['id']} for organization {organization_id}")
    User = apps.get_model("user_management", "User")
    UserNotificationPolicy = apps.get_model("base", "UserNotificationPolicy")
    user = User.objects.filter(pk=user_to_migrate["id"], organization_id=organization_id).first()
    if user:
        # Only carry the legacy phone number over if none is verified yet.
        if not user.verified_phone_number and user_to_migrate["old_verified_phone_number"]:
            user.save_verified_phone_number(user_to_migrate["old_verified_phone_number"])
        url = "personal_notification_rules?user_id={}".format(user_to_migrate["old_public_primary_key"])
        user_notification_policies = get_data_with_respect_to_pagination(api_token, url)
        notification_policies_to_create = []
        # Snapshot current policy pks so only the pre-migration ones get deleted below.
        existing_notification_policies_ids = list(user.notification_policies.all().values_list("pk", flat=True))
        for notification_policy in user_notification_policies:
            try:
                step, notification_channel = PersonalNotificationRuleSerializer._type_to_step_and_notification_channel(
                    notification_policy["type"],
                )
            except exceptions.ValidationError:
                # Legacy rule type has no OnCall equivalent - skip it.
                continue
            new_notification_policy = UserNotificationPolicy(
                user=user,
                important=notification_policy["important"],
                step=step,
                order=notification_policy["position"],
            )
            if step == UserNotificationPolicy.Step.NOTIFY:
                new_notification_policy.notify_by = notification_channel
            if step == UserNotificationPolicy.Step.WAIT:
                duration = notification_policy.get("duration")
                wait_delay = timezone.timedelta(seconds=duration) if duration else UserNotificationPolicy.FIVE_MINUTES
                new_notification_policy.wait_delay = wait_delay
            notification_policies_to_create.append(new_notification_policy)
        UserNotificationPolicy.objects.bulk_create(notification_policies_to_create, batch_size=5000)
        # Replace, not merge: drop the policies the user had before migration.
        user.notification_policies.filter(pk__in=existing_notification_policies_ids).delete()
    current_task_id = migrate_user_data.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration user {user_to_migrate['id']} for organization {organization_id}")

View file

@ -1,12 +0,0 @@
from common.api_helpers.optional_slash_router import optional_slash_path
from .views.customers_migration_tool import MigrateAPIView, MigrationPlanAPIView, MigrationStatusAPIView
# Namespace for reversing these routes (e.g. "migration-tool:migrate_from_amixr").
app_name = "migration-tool"

# Plugin-facing endpoints: build a dry-run plan, start the migration, poll status.
urlpatterns = [
    optional_slash_path("amixr_migration_plan", MigrationPlanAPIView.as_view(), name="amixr_migration_plan"),
    optional_slash_path("migrate_from_amixr", MigrateAPIView.as_view(), name="migrate_from_amixr"),
    optional_slash_path("amixr_migration_status", MigrationStatusAPIView.as_view(), name="amixr_migration_status"),
]

View file

@ -1,35 +0,0 @@
import requests
from django.utils import timezone
from apps.migration_tool.constants import REQUEST_URL
class APIResponseException(Exception):
    """Raised when the legacy Amixr API returns a non-200 response."""

    pass
def get_data_with_respect_to_pagination(api_token, endpoint):
    """Fetch every page of a paginated legacy-Amixr endpoint and return the combined results.

    Raises APIResponseException on any non-200 response.
    """

    def fetch(url):
        # The legacy API takes the raw token in the AUTHORIZATION header.
        response = requests.get(url, headers={"AUTHORIZATION": api_token})
        if response.status_code != 200:
            raise APIResponseException(f"Status code: {response.status_code}, Data: {response.content}")
        return response.json()

    page = fetch(f"{REQUEST_URL}/{endpoint}")
    results = page["results"]
    # Follow "next" links until the API reports no further pages.
    while page["next"]:
        page = fetch(page["next"])
        results.extend(page["results"])
    return results
def convert_string_to_datetime(dt_str):
    """Parse a legacy-Amixr timestamp string into a datetime.

    Tries the microsecond + numeric-offset form first (e.g.
    "2022-05-31T14:46:00.123456+0000", aware result), then falls back to the
    second-precision "Z"-suffixed form (e.g. "2022-05-31T14:46:00Z", parsed
    as a naive datetime, matching the original behaviour).

    Raises:
        ValueError: if the string matches neither format.
    """
    # Stdlib datetime directly; django.utils.timezone.datetime resolves to the
    # same class, so behaviour is unchanged — TODO confirm against Django source.
    from datetime import datetime

    # %H:%M:%S is spelled out instead of the original %X directive: strptime's
    # %X follows the current locale's time representation, which breaks parsing
    # of these fixed-format timestamps under non-C locales.
    try:
        dt = datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%S.%f%z")
    except ValueError:
        dt = datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%SZ")
    return dt

View file

@ -1,186 +0,0 @@
import logging
import requests
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.alerts.models import AlertReceiveChannel
from apps.api.permissions import IsAdmin, MethodPermission
from apps.auth_token.auth import PluginAuthentication
from apps.migration_tool.constants import FINISHED, IN_PROGRESS, NOT_STARTED, REQUEST_URL
from apps.migration_tool.tasks import start_migration_from_old_amixr
from apps.migration_tool.utils import get_data_with_respect_to_pagination
from common.api_helpers.exceptions import BadRequest
logger = logging.getLogger(__name__)
class MigrationPlanAPIView(APIView):
    """Build a dry-run "migration plan" from the legacy Amixr API.

    POST with a legacy API token; responds with which users/integrations would
    be migrated and counts of routes, escalation policies and calendars.
    Performance fix over the original: email/pk membership tests now run
    against sets built once, instead of `in` checks against a lazy queryset
    (which can issue a query per test) and O(n) list scans inside loops.
    """

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, MethodPermission)
    method_permissions = {IsAdmin: ("POST",)}

    def post(self, request):
        api_token = request.data.get("token", None)
        if api_token is None:
            raise BadRequest(detail="API token is required")
        organization = request.auth.organization
        if organization.is_amixr_migration_started:
            raise BadRequest(detail="Migration from Amixr has already been started")
        # check token
        response = requests.get(f"{REQUEST_URL}/users", headers={"AUTHORIZATION": api_token})
        if response.status_code == status.HTTP_403_FORBIDDEN:
            raise BadRequest(detail="Invalid token")
        # Just not to re-make the frontend...
        USERS_NOT_TO_MIGRATE_KEY = (
            "Users WON'T be migrated (couldn't find those users in the Grafana Cloud, ask "
            "them to sign up if you want their data to be migrated and re-build the migration plan)"
        )
        USERS_TO_MIGRATE = "Users will be migrated"
        INTEGRATIONS_TO_MIGRATE = "Integrations to migrate"
        INTEGRATIONS_COUNT = "Integrations count"
        ROUTES_COUNT = "Routes count"
        ESCALATIONS_POLICIES_COUNT = "Escalation policies count"
        CALENDARS_COUNT = "Calendars count"
        migration_plan = {
            USERS_TO_MIGRATE: [],
            USERS_NOT_TO_MIGRATE_KEY: [],
            INTEGRATIONS_TO_MIGRATE: [],
            INTEGRATIONS_COUNT: 0,
            ROUTES_COUNT: 0,
            ESCALATIONS_POLICIES_COUNT: 0,
            CALENDARS_COUNT: 0,
        }
        logger.info(f"migration plan for organization {organization.pk}: get users")
        users = get_data_with_respect_to_pagination(api_token, "users")
        logger.info(f"migration plan for organization {organization.pk}: got users")
        # Materialize emails once: `in` against a lazy values_list queryset is
        # O(n) per test and may hit the database repeatedly.
        org_users = set(organization.users.values_list("email", flat=True))
        for user in users:
            if user["email"] in org_users:
                migration_plan[USERS_TO_MIGRATE].append(user["email"])
            else:
                migration_plan[USERS_NOT_TO_MIGRATE_KEY].append(user["email"])
        logger.info(f"migration plan for organization {organization.pk}: get integrations")
        integrations = get_data_with_respect_to_pagination(api_token, "integrations")
        logger.info(f"migration plan for organization {organization.pk}: got integrations")
        existing_integrations_names = set(organization.alert_receive_channels.values_list("verbal_name", flat=True))
        # Sets (not lists): these are membership-tested inside the loops below.
        integrations_to_migrate_public_pk = set()
        for integration in integrations:
            if integration["name"] in existing_integrations_names:
                continue
            try:
                # Reverse-map the legacy type string to an OnCall integration type key.
                integration_type = [
                    key
                    for key, value in AlertReceiveChannel.INTEGRATIONS_TO_REVERSE_URL_MAP.items()
                    if value == integration["type"]
                ][0]
            except IndexError:
                continue
            if integration_type not in AlertReceiveChannel.WEB_INTEGRATION_CHOICES:
                continue
            migration_plan[INTEGRATIONS_TO_MIGRATE].append(integration["name"])
            integrations_to_migrate_public_pk.add(integration["id"])
        migration_plan[INTEGRATIONS_COUNT] = len(migration_plan[INTEGRATIONS_TO_MIGRATE])
        routes_to_migrate_public_pk = set()
        logger.info(f"migration plan for organization {organization.pk}: get routes")
        routes = get_data_with_respect_to_pagination(api_token, "routes")
        logger.info(f"migration plan for organization {organization.pk}: got routes")
        for route in routes:
            if route["integration_id"] in integrations_to_migrate_public_pk:
                migration_plan[ROUTES_COUNT] += 1
                routes_to_migrate_public_pk.add(route["id"])
        logger.info(f"migration plan for organization {organization.pk}: get escalation_policies")
        escalation_policies = get_data_with_respect_to_pagination(api_token, "escalation_policies")
        logger.info(f"migration plan for organization {organization.pk}: got escalation_policies")
        for escalation_policy in escalation_policies:
            if escalation_policy["route_id"] in routes_to_migrate_public_pk:
                migration_plan[ESCALATIONS_POLICIES_COUNT] += 1
        logger.info(f"migration plan for organization {organization.pk}: get schedules")
        schedules = get_data_with_respect_to_pagination(api_token, "schedules")
        logger.info(f"migration plan for organization {organization.pk}: got schedules")
        existing_schedules_names = set(organization.oncall_schedules.values_list("name", flat=True))
        for schedule in schedules:
            # Same filter as the actual migration: iCal-backed, not already present.
            if not schedule["ical_url"] or schedule["name"] in existing_schedules_names:
                continue
            migration_plan[CALENDARS_COUNT] += 1
        return Response(migration_plan)
class MigrateAPIView(APIView):
    """Kick off the one-time data migration from Amixr.IO into this organization.

    POST body must contain ``token`` (an Amixr.IO API token). The token is
    verified against the Amixr API before the async migration task is queued.
    Raises ``BadRequest`` when the token is missing/invalid or when a migration
    has already been started for this organization.
    """

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)

    def post(self, request):
        api_token = request.data.get("token", None)
        if api_token is None:
            raise BadRequest(detail="API token is required")
        organization = request.auth.organization
        # Migration is a strictly one-shot operation per organization.
        if organization.is_amixr_migration_started:
            raise BadRequest(detail="Migration from Amixr has already been started")
        # Verify the token before committing to anything. A timeout keeps this
        # request handler from hanging on a slow/unreachable Amixr API, and any
        # non-2xx response (not only 403) means we must not start the migration.
        try:
            response = requests.get(f"{REQUEST_URL}/users", headers={"AUTHORIZATION": api_token}, timeout=15)
        except requests.RequestException:
            raise BadRequest(detail="Could not verify token with Amixr API")
        if not response.ok:
            raise BadRequest(detail="Invalid token")
        organization.is_amixr_migration_started = True
        organization.save(update_fields=["is_amixr_migration_started"])
        # Pass primitive ids (not model instances) to the celery task.
        start_migration_from_old_amixr.delay(
            api_token=api_token, organization_id=organization.pk, user_id=request.user.pk
        )
        return Response(status=status.HTTP_200_OK)
class MigrationStatusAPIView(APIView):
    """Report the Amixr.IO migration state and the new integration endpoints."""

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)

    def get(self, request):
        organization = request.auth.organization
        payload = {
            "migration_status": self.get_migration_status(organization),
            "endpoints_list": self.get_endpoints_list(organization),
        }
        return Response(payload, status=status.HTTP_200_OK)

    def get_migration_status(self, organization):
        # Not started until the org flag is set; after that, "in progress" while
        # any migration task is still unfinished, "finished" otherwise.
        if not organization.is_amixr_migration_started:
            return NOT_STARTED
        has_pending = organization.migration_tasks.filter(is_finished=False).exists()
        return IN_PROGRESS if has_pending else FINISHED

    def get_endpoints_list(self, organization):
        # Only integrations without a team (the "General" team) are listed.
        team_integrations = organization.alert_receive_channels.filter(team_id__isnull=True)
        return [
            f"{integration.verbal_name}, new endpoint: {integration.integration_url}"
            for integration in team_integrations
        ]

View file

@ -37,7 +37,6 @@ urlpatterns = [
path("integrations/v1/", include("apps.integrations.urls", namespace="integrations")),
path("twilioapp/", include("apps.twilioapp.urls")),
path("api/v1/", include("apps.public_api.urls", namespace="api-public")),
path("api/internal/v1/", include("apps.migration_tool.urls", namespace="migration-tool")),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.FEATURE_SLACK_INTEGRATION_ENABLED:

View file

@ -201,7 +201,6 @@ INSTALLED_APPS = [
"apps.public_api",
"apps.grafana_plugin",
"apps.grafana_plugin_management",
"apps.migration_tool",
"corsheaders",
"debug_toolbar",
"social_django",

View file

@ -10,7 +10,6 @@ import IncidentsPage2 from 'pages/incidents/Incidents';
import IntegrationsPage2 from 'pages/integrations/Integrations';
import LiveSettingsPage from 'pages/livesettings/LiveSettingsPage';
import MaintenancePage2 from 'pages/maintenance/Maintenance';
import MigrationTool from 'pages/migration-tool/MigrationTool';
import OrganizationLogPage2 from 'pages/organization-logs/OrganizationLog';
import OutgoingWebhooks2 from 'pages/outgoing_webhooks/OutgoingWebhooks';
import SchedulePage from 'pages/schedule/Schedule';
@ -125,13 +124,6 @@ export const pages: PageDefinition[] = [
text: 'Org Logs',
hideFromTabs: true,
},
{
component: MigrationTool,
icon: 'import',
id: 'migration-tool',
text: 'Migrate From Amixr.IO',
hideFromTabs: true,
},
{
component: CloudPage,
icon: 'cloud',

View file

@ -1,9 +0,0 @@
/* Container for the migration-tool page content. */
.root {
  margin-top: 24px;
  width: 1000px;
}
/* Indent bulleted/numbered lists inside the page body. */
.root ul,
.root ol {
  padding-left: 20px;
}

View file

@ -1,364 +0,0 @@
import React from 'react';
import { Button, LoadingPlaceholder, Tag, TextArea } from '@grafana/ui';
import cn from 'classnames/bind';
import { observer } from 'mobx-react';
import Emoji from 'react-emoji-render';
import Text from 'components/Text/Text';
import { makeRequest } from 'network';
import { withMobXProviderContext } from 'state/withStore';
import { showApiError } from 'utils';
import apiTokensImg from './img/api-tokens.png';
import styles from './MigrationTool.module.css';
const cx = cn.bind(styles);
// The page takes no props; state is driven entirely by backend requests.
interface MigrationToolProps {}

interface MigrationToolState {
  // Server-reported status; undefined until the first status fetch completes.
  migrationStatus?: MIGRATION_STATUS;
  // True once the user has requested a migration plan (disables the button).
  migrationInitiated?: boolean;
  // Plan object returned by the backend; rendered as key/value (or list) rows.
  migrationPlan?: any;
  // Amixr.IO API key typed by the user.
  apiKey: string;
  // Human-readable "integration, new endpoint: url" strings from the backend.
  endpointsList?: string[];
}
// Mirrors the backend's migration status constants ("not_started" /
// "in_progress" / "finished") returned by the status endpoint.
enum MIGRATION_STATUS {
  NOT_STARTED = 'not_started',
  IN_PROGRESS = 'in_progress',
  FINISHED = 'finished',
}
// Smoothly scroll the given element into view; silent no-op for a null ref.
function scrollIntoView(node: HTMLDivElement | null) {
  node?.scrollIntoView({ behavior: 'smooth' });
}
// Standalone admin page that drives the one-time Amixr.IO -> Grafana OnCall
// migration: explains the timeline, collects the Amixr API key, previews the
// migration plan, triggers the migration and polls its status every 20s.
@observer
class MigrationToolPage extends React.Component<MigrationToolProps, MigrationToolState> {
  state: MigrationToolState = {
    migrationStatus: undefined,
    migrationInitiated: false,
    migrationPlan: undefined,
    apiKey: '',
  };

  // Handle for the 20s status-polling interval.
  // NOTE(review): declared as setTimeout's return type but used with
  // setInterval/clearInterval below — works at runtime, though
  // ReturnType<typeof setInterval> would be the precise type.
  statusTimer: ReturnType<typeof setTimeout>;

  async componentDidMount() {
    // Fetch the current status once on mount; polling only starts if the
    // backend reports the migration is already in progress.
    this.checkStatus();
  }

  render() {
    const { migrationStatus, migrationInitiated, migrationPlan, apiKey, endpointsList } = this.state;
    return (
      <div className={cx('root')}>
        <p>
          <Text.Title level={3} className={cx('title')}>
            Migrate From Amixr.IO
          </Text.Title>
        </p>
        {/* Static explanatory copy: why the migration exists and how to prepare. */}
        <div className={cx('desc')}>
          <p>Dear Amixr.IO customers!</p>
          <p>
            Amixr Inc. was acquired by Grafana Labs in 2021. Grafana OnCall is a very similar product, and most of you
            shouldnt notice a lot of difference. We used the chance and made a few core improvements, which we hope
            will make your experience much better.
          </p>
          <p>
            We understand that it will put additional work on your shoulders. We did our best to prepare a migration
            tool to assist you with the migration process.
          </p>
          <p>
            Unfortunately, we no longer plan to support Amixr.IO and expect to shut down Amixr.IO to be able to focus on
            Grafana OnCall. The timeline is:
            {/* NOTE(review): "31 June" and "30st" look like copy mistakes ("30 June" / "30th"); text left as-is. */}
            <ul>
              <li>1 June 2022 - 31 June 2022 the migration tool will be available.</li>
              <li>30st of June Amixr.IO and all services located in this domain will be disabled.</li>
            </ul>
          </p>
          <p>
            How to prepare for the migration
            <ol>
              <li>
                Ask all users from your Amixr.IO workspace to{' '}
                <a href="https://grafana.com/auth/sign-up/create-user" target="_blank" rel="noreferrer">
                  sign up
                </a>{' '}
                in the Grafana Cloud.
              </li>
              <li>Request the migration plan.</li>
            </ol>
          </p>
          <p>
            For any technical assistance please reach out to our team in{' '}
            <a href="https://slack.grafana.com/" target="_blank" rel="noreferrer">
              Grafana Slack channel #grafana-oncall
            </a>
            . Well be happy to give you a hand and help you with migration on a call.
          </p>
          <p>For any questions related to pricing, or payments, please reach out to our sales:</p>
          <p>
            For any other questions:
            <ul>
              <li>
                Matvey Kukuy (ex-CEO of Amixr):{' '}
                <a href="mailto:matvey.kukuy@grafana.com" target="_blank" rel="noreferrer">
                  matvey.kukuy@grafana.com
                </a>
              </li>
              <li>
                Ildar Iskhakov (ex-CTO of Amixr):{' '}
                <a href="mailto:ildar.iskhakov@grafana.com" target="_blank" rel="noreferrer">
                  ildar.iskhakov@grafana.com
                </a>
              </li>
            </ul>
          </p>
        </div>
        {/* Interactive part: rendered only after the initial status fetch resolves. */}
        {migrationStatus ? (
          <>
            {/* Step 1: collect the API key (only while nothing has been migrated yet). */}
            {migrationStatus === MIGRATION_STATUS.NOT_STARTED && (
              <>
                <hr />
                <div className={cx('initiate-migration')}>
                  <p>
                    <Text.Title level={3}>Initiate migration to Grafana OnCall</Text.Title>
                  </p>
                  <p>Find API key in your Amixr.IO workspace -&gt; bottom of the Settings page:</p>
                  <p>
                    <img style={{ width: '600px' }} src={apiTokensImg} />
                  </p>
                  <p>Add Amixr.IO API Key to the field below:</p>
                  <p>
                    <TextArea
                      disabled={Boolean(migrationPlan)}
                      style={{ width: '600px' }}
                      rows={4}
                      onChange={(event) => {
                        this.setState({ apiKey: event.currentTarget.value });
                      }}
                    />
                  </p>
                  <p>
                    <Button
                      variant="secondary"
                      onClick={this.handleInitiateMigrationClick}
                      disabled={!apiKey.length || migrationInitiated}
                    >
                      Next Step: Build the migration plan
                    </Button>
                  </p>
                </div>
              </>
            )}
            {/* Step 2: show the migration plan (or a spinner while it loads). */}
            {migrationInitiated && (
              <>
                <hr />
                {migrationPlan ? (
                  <div className={cx('migration-plan')} ref={scrollIntoView}>
                    <p>
                      <Text.Title level={3}>Your migration plan</Text.Title>
                    </p>
                    {/* Plan entries are either scalar counts or lists of names. */}
                    {Object.keys(migrationPlan).map((key: string) => {
                      const item = migrationPlan[key];
                      return (
                        <p key={key}>
                          {key}:{' '}
                          {Array.isArray(item) && item.length ? (
                            <ul>
                              {item.map((subItem) => (
                                <li key={subItem}>
                                  <Emoji text={subItem || ''} />
                                </li>
                              ))}
                            </ul>
                          ) : Array.isArray(item) || !item ? (
                            ''
                          ) : (
                            item
                          )}
                        </p>
                      );
                    })}
                    <p>
                      <Tag colorIndex={7} name="Attention" />{' '}
                      <Text>
                        Migration will be applied to the "General" team. Once you perform the migration there won't be
                        possible to migrate data to the current Grafana OnCall workspace again. It's a 1-time operation.
                      </Text>
                    </p>
                    <p>
                      <Tag colorIndex={7} name="Attention" />{' '}
                      <Text>
                        Existing notification policies for migrated users will be replaced with policies from Amixr.io.
                        OnCall does not support notifications via email.
                      </Text>
                    </p>
                    <p>
                      <Tag colorIndex={7} name="Attention" />{' '}
                      <Text>
                        Only "ical" schedules will be migrated. User names in your calendars should not be prefixed with
                        '@', use bare usernames or emails.
                      </Text>
                    </p>
                    <p>
                      <Tag colorIndex={1} name="Info" />{' '}
                      <Text>
                        Unfortunately Grafana OnCall does not support Sentry OAuth-based integration. It wont be
                        migrated. Please use Webhook integration instead and configure it after your migration will
                        finish. Were sorry for the inconvenience.
                      </Text>
                    </p>
                    <p>
                      <Tag colorIndex={1} name="Info" />{' '}
                      <Text>
                        If you use terraform, please check our new terraform provider. We slightly changed it.
                      </Text>
                    </p>
                    <p>
                      <Button
                        variant="secondary"
                        onClick={this.handlePerformMigrationClick}
                        disabled={migrationStatus !== MIGRATION_STATUS.NOT_STARTED}
                      >
                        Next Step: Perform migration
                      </Button>
                    </p>
                  </div>
                ) : (
                  <LoadingPlaceholder text="Loading Migration Plan..." />
                )}
              </>
            )}
            {/* Step 3: progress / completion view with post-migration checklist. */}
            {(migrationStatus === MIGRATION_STATUS.IN_PROGRESS || migrationStatus === MIGRATION_STATUS.FINISHED) && (
              <>
                <hr />
                <div className={cx('migration-status')} ref={scrollIntoView}>
                  <p>
                    {migrationStatus === MIGRATION_STATUS.FINISHED ? (
                      <Text.Title level={3} type="success">
                        Migration completed!
                      </Text.Title>
                    ) : (
                      <Text.Title level={3}>Migration started</Text.Title>
                    )}
                  </p>
                  <p>
                    {migrationStatus === MIGRATION_STATUS.IN_PROGRESS && <LoadingPlaceholder text="Migrating..." />}
                  </p>
                  <p>
                    {migrationStatus === MIGRATION_STATUS.IN_PROGRESS &&
                      "It may take a few hours (closing the tab won't stop the migration). "}
                    For now you need to do some actions to avoid collecting data in old Amixr.IO:
                    <ol>
                      <li>Update endpoints for alert sources:</li>
                      {endpointsList ? (
                        <ul>
                          {endpointsList.map((item) => (
                            <li key={item}>
                              <Emoji text={item || ''} />{' '}
                            </li>
                          ))}
                        </ul>
                      ) : null}
                      <li>Remove Slack & Telegram integrations in Amixr.IO.</li>
                      <li>Connect Slack & Telegram in Grafana OnCall.</li>
                      <li>Announce the start of migration to your colleagues!</li>
                    </ol>
                  </p>
                  <p>
                    <Tag colorIndex={1} name="Info" />{' '}
                    <Text type="secondary">
                      There is no way to re-migrate your Amixr.IO workspace to current OnCall workspace again. In case
                      you need to repeat the migration please use new Grafana Cloud workspace.
                    </Text>
                  </p>
                </div>
              </>
            )}
          </>
        ) : (
          <LoadingPlaceholder text={'Loading migration status'} />
        )}
      </div>
    );
  }

  // Request a migration-plan preview from the backend using the entered key.
  // On failure the "initiated" flag is reset so the user can retry.
  handleInitiateMigrationClick = () => {
    const { apiKey } = this.state;
    this.setState({ migrationInitiated: true }, () => {
      makeRequest('/amixr_migration_plan/', { method: 'POST', data: { token: apiKey } })
        .then((data) => {
          this.setState({ migrationPlan: data });
        })
        .catch((error) => {
          this.setState({ migrationInitiated: false });
          throw error;
        })
        .catch(showApiError);
    });
  };

  // Start the actual migration, then begin polling its status. The status is
  // optimistically set to IN_PROGRESS and rolled back if the request fails.
  handlePerformMigrationClick = () => {
    const { apiKey } = this.state;
    this.setState({ migrationStatus: MIGRATION_STATUS.IN_PROGRESS }, () => {
      makeRequest('/migrate_from_amixr/', { method: 'POST', data: { token: apiKey } })
        .then(this.startMigrationStatusPolling)
        .catch((error) => {
          this.setState({ migrationStatus: MIGRATION_STATUS.NOT_STARTED });
          throw error;
        })
        .catch(showApiError);
    });
  };

  // Poll the backend status every 20 seconds.
  startMigrationStatusPolling = () => {
    this.statusTimer = setInterval(this.checkStatus, 20000);
  };

  stopMigrationStatusPolling = () => {
    clearInterval(this.statusTimer);
  };

  // Fetch current status + endpoints list; (re)start polling if the backend
  // says the migration is in progress. Polling stops on any request error.
  checkStatus = () => {
    const { apiKey } = this.state;
    makeRequest('/amixr_migration_status/', { data: { token: apiKey } })
      .then(({ migration_status, endpoints_list }) => {
        this.setState({ migrationStatus: migration_status, endpointsList: endpoints_list });
        switch (migration_status) {
          case 'in_progress':
            if (!this.statusTimer) {
              this.startMigrationStatusPolling();
            }
            break;
        }
      })
      .catch((error) => {
        this.stopMigrationStatusPolling();
        throw error;
      })
      .catch(showApiError);
  };

  componentWillUnmount() {
    // Avoid setState-after-unmount from the polling interval.
    this.stopMigrationStatusPolling();
  }
}
export default withMobXProviderContext(MigrationToolPage);

Binary file not shown.

Before

Width:  |  Height:  |  Size: 45 KiB