diff --git a/peeringdb_server/__init__.py b/peeringdb_server/__init__.py
index a53dbd54..6eade01d 100644
--- a/peeringdb_server/__init__.py
+++ b/peeringdb_server/__init__.py
@@ -1 +1 @@
-default_app_config = 'peeringdb_server.apps.PeeringDBServerAppConfig'
+default_app_config = "peeringdb_server.apps.PeeringDBServerAppConfig"
diff --git a/peeringdb_server/admin.py b/peeringdb_server/admin.py
index 9f709514..a36bdda6 100644
--- a/peeringdb_server/admin.py
+++ b/peeringdb_server/admin.py
@@ -23,7 +23,11 @@
from django.template.response import TemplateResponse
from django.db.models import Q
from django.db.models.functions import Concat
-from django_namespace_perms.admin import UserPermissionInline, UserPermissionInlineAdd, UserAdmin
+from django_namespace_perms.admin import (
+ UserPermissionInline,
+ UserPermissionInlineAdd,
+ UserAdmin,
+)
import reversion
from reversion.admin import VersionAdmin
@@ -31,23 +35,43 @@
from django_handleref.admin import VersionAdmin as HandleRefVersionAdmin
import peeringdb_server.admin_commandline_tools as acltools
-from peeringdb_server.views import (JsonResponse, HttpResponseForbidden)
+from peeringdb_server.views import JsonResponse, HttpResponseForbidden
from peeringdb_server.models import (
- REFTAG_MAP, QUEUE_ENABLED, COMMANDLINE_TOOLS, OrganizationMerge,
- OrganizationMergeEntity, Sponsorship, SponsorshipOrganization, Partnership,
- UserOrgAffiliationRequest, VerificationQueueItem, Organization, Facility,
- InternetExchange, Network, InternetExchangeFacility, IXLan,
- IXLanIXFMemberImportLog, IXLanIXFMemberImportLogEntry, IXLanPrefix,
- NetworkContact, NetworkFacility, NetworkIXLan, User, CommandLineTool, UTC,
- DeskProTicket)
+ REFTAG_MAP,
+ QUEUE_ENABLED,
+ COMMANDLINE_TOOLS,
+ OrganizationMerge,
+ OrganizationMergeEntity,
+ Sponsorship,
+ SponsorshipOrganization,
+ Partnership,
+ UserOrgAffiliationRequest,
+ VerificationQueueItem,
+ Organization,
+ Facility,
+ InternetExchange,
+ Network,
+ InternetExchangeFacility,
+ IXLan,
+ IXLanIXFMemberImportLog,
+ IXLanIXFMemberImportLogEntry,
+ IXLanPrefix,
+ NetworkContact,
+ NetworkFacility,
+ NetworkIXLan,
+ User,
+ CommandLineTool,
+ UTC,
+ DeskProTicket,
+)
from peeringdb_server.mail import mail_users_entity_merge
from peeringdb_server.inet import RdapLookup, RdapException
-delete_selected.short_description = u'HARD DELETE - Proceed with caution'
+delete_selected.short_description = u"HARD DELETE - Proceed with caution"
from django.utils.translation import ugettext_lazy as _
-#def _(x):
+# def _(x):
# return x
@@ -62,8 +86,12 @@ class StatusFilter(admin.SimpleListFilter):
dflt = "all"
def lookups(self, request, model_admin):
- return [("ok", "ok"), ("pending", "pending"), ("deleted", "deleted"),
- ("all", "all")]
+ return [
+ ("ok", "ok"),
+ ("pending", "pending"),
+ ("deleted", "deleted"),
+ ("all", "all"),
+ ]
def choices(self, cl):
val = self.value()
@@ -71,11 +99,9 @@ def choices(self, cl):
val = "all"
for lookup, title in self.lookup_choices:
yield {
- 'selected': val == lookup,
- 'query_string': cl.get_query_string({
- self.parameter_name: lookup
- }, []),
- 'display': title
+ "selected": val == lookup,
+ "query_string": cl.get_query_string({self.parameter_name: lookup}, []),
+ "display": title,
}
def queryset(self, request, queryset):
@@ -96,7 +122,8 @@ def fk_handleref_filter(form, field, tag=None):
if tag in REFTAG_MAP and form.instance:
model = REFTAG_MAP.get(tag)
qset = model.handleref.filter(
- Q(status="ok") | Q(id=getattr(form.instance, "%s_id" % field)))
+ Q(status="ok") | Q(id=getattr(form.instance, "%s_id" % field))
+ )
try:
qset = qset.order_by("name")
@@ -136,8 +163,7 @@ def merge_organizations(targets, target, request):
for org in targets:
if org == target:
- raise ValueError(
- _("Target org cannot be in selected organizations list"))
+ raise ValueError(_("Target org cannot be in selected organizations list"))
for org in targets:
@@ -178,16 +204,16 @@ def merge_organizations(targets, target, request):
org.delete()
org_merged += 1
- mail_users_entity_merge(source_admins,
- target.admin_usergroup.user_set.all(), org,
- target)
+ mail_users_entity_merge(
+ source_admins, target.admin_usergroup.user_set.all(), org, target
+ )
return {
"ix": ix_moved,
"fac": fac_moved,
"net": net_moved,
"user": user_moved,
- "org": org_merged
+ "org": org_merged,
}
@@ -195,10 +221,9 @@ def merge_organizations(targets, target, request):
class StatusForm(baseForms.ModelForm):
- status = baseForms.ChoiceField(choices=[("ok",
- "ok"), ("pending",
- "pending"), ("deleted",
- "deleted")])
+ status = baseForms.ChoiceField(
+ choices=[("ok", "ok"), ("pending", "pending"), ("deleted", "deleted")]
+ )
def __init__(self, *args, **kwargs):
super(StatusForm, self).__init__(*args, **kwargs)
@@ -207,18 +232,15 @@ def __init__(self, *args, **kwargs):
if inst.status == "ok":
self.fields["status"].choices = [("ok", "ok")]
elif inst.status == "pending":
- self.fields["status"].choices = [("ok", "ok"), ("pending",
- "pending")]
+ self.fields["status"].choices = [("ok", "ok"), ("pending", "pending")]
elif inst.status == "deleted":
- self.fields["status"].choices = [("ok", "ok"), ("deleted",
- "deleted")]
+ self.fields["status"].choices = [("ok", "ok"), ("deleted", "deleted")]
class ModelAdminWithUrlActions(admin.ModelAdmin):
def make_redirect(self, obj, action):
opts = obj.model._meta
- return redirect("admin:%s_%s_changelist" % (opts.app_label,
- opts.model_name))
+ return redirect("admin:%s_%s_changelist" % (opts.app_label, opts.model_name))
def actions_view(self, request, object_id, action, **kwargs):
"""
@@ -237,8 +259,9 @@ def actions_view(self, request, object_id, action, **kwargs):
return redir
# return redirect("admin:%s_%s_changelist" % (opts.app_label, opts.model_name))
return redirect(
- "admin:%s_%s_changelist" % (obj.model._meta.app_label,
- obj.model._meta.model_name))
+ "admin:%s_%s_changelist"
+ % (obj.model._meta.app_label, obj.model._meta.model_name)
+ )
def get_urls(self):
"""
@@ -247,9 +270,11 @@ def get_urls(self):
info = self.model._meta.app_label, self.model._meta.model_name
urls = [
- url(r'^(\d+)/action/([\w]+)/$',
+ url(
+ r"^(\d+)/action/([\w]+)/$",
self.admin_site.admin_view(self.actions_view),
- name="%s_%s_actions" % info),
+ name="%s_%s_actions" % info,
+ ),
] + super(ModelAdminWithUrlActions, self).get_urls()
return urls
@@ -278,14 +303,18 @@ def soft_delete(modeladmin, request, queryset):
class SanitizedAdmin(object):
def get_readonly_fields(self, request, obj=None):
- return ("version", ) + tuple(
- super(SanitizedAdmin, self).get_readonly_fields(request, obj=obj))
+ return ("version",) + tuple(
+ super(SanitizedAdmin, self).get_readonly_fields(request, obj=obj)
+ )
-class SoftDeleteAdmin(SanitizedAdmin, HandleRefVersionAdmin, VersionAdmin, admin.ModelAdmin):
+class SoftDeleteAdmin(
+ SanitizedAdmin, HandleRefVersionAdmin, VersionAdmin, admin.ModelAdmin
+):
"""
Soft delete admin
"""
+
actions = [soft_delete]
object_history_template = "handleref/grappelli/object_history.html"
version_details_template = "handleref/grappelli/version_details.html"
@@ -299,8 +328,7 @@ def has_delete_permission(self, request, obj=None):
def save_formset(self, request, form, formset, change):
if request.user:
reversion.set_user(request.user)
- super(SoftDeleteAdmin, self).save_formset(request, form, formset,
- change)
+ super(SoftDeleteAdmin, self).save_formset(request, form, formset, change)
class ModelAdminWithVQCtrl(object):
@@ -317,20 +345,20 @@ def get_fieldsets(self, request, obj=None):
"""
fieldsets = tuple(
- super(ModelAdminWithVQCtrl, self).get_fieldsets(request, obj=obj))
+ super(ModelAdminWithVQCtrl, self).get_fieldsets(request, obj=obj)
+ )
# on automatically defined fieldsets it will insert the controls
# somewhere towards the bottom, we dont want that - so we look for it and
# remove it
for k, s in fieldsets:
- if 'verification_queue' in s["fields"]:
+ if "verification_queue" in s["fields"]:
s["fields"].remove("verification_queue")
# attach controls to top of fieldset
- fieldsets = ((None, {
- 'classes': ('wide,'),
- 'fields': ('verification_queue', )
- }), ) + fieldsets
+ fieldsets = (
+ (None, {"classes": ("wide,"), "fields": ("verification_queue",)}),
+ ) + fieldsets
return fieldsets
def get_readonly_fields(self, request, obj=None):
@@ -338,9 +366,9 @@ def get_readonly_fields(self, request, obj=None):
make the modeladmin aware that "verification_queue" is a valid
readonly field
"""
- return ("verification_queue", ) + tuple(
- super(ModelAdminWithVQCtrl, self).get_readonly_fields(
- request, obj=obj))
+ return ("verification_queue",) + tuple(
+ super(ModelAdminWithVQCtrl, self).get_readonly_fields(request, obj=obj)
+ )
def verification_queue(self, obj):
"""
@@ -350,19 +378,16 @@ def verification_queue(self, obj):
if getattr(settings, "DISABLE_VERIFICATION_QUEUE", False):
return _("Verification Queue is currently disabled")
if self.model not in QUEUE_ENABLED:
- return _(
- "Verification Queue is currently disabled for this object type"
- )
+ return _("Verification Queue is currently disabled for this object type")
vq = VerificationQueueItem.objects.filter(
- content_type=ContentType.objects.get_for_model(type(obj)),
- object_id=obj.id).first()
+ content_type=ContentType.objects.get_for_model(type(obj)), object_id=obj.id
+ ).first()
if vq:
return (
u'<a href="{}">{}</a> <a href="{}">{}</a>'
- ).format(vq.approve_admin_url, _('APPROVE'), vq.deny_admin_url,
- _('DENY'))
+ ).format(vq.approve_admin_url, _("APPROVE"), vq.deny_admin_url, _("DENY"))
return _("APPROVED")
verification_queue.allow_tags = True
@@ -390,7 +415,8 @@ def ixf_import_attempt_info(self, obj):
def prefixes(self, obj):
return ", ".join(
- [str(ixpfx.prefix) for ixpfx in obj.ixpfx_set_active_or_pending])
+ [str(ixpfx.prefix) for ixpfx in obj.ixpfx_set_active_or_pending]
+ )
class InternetExchangeFacilityInline(SanitizedAdmin, admin.TabularInline):
@@ -429,18 +455,22 @@ class NetworkIXLanValidationMixin(object):
def clean_ipaddr4(self):
ipaddr4 = self.cleaned_data["ipaddr4"]
instance = self.instance
- if NetworkIXLan.objects.filter(
- ipaddr4=ipaddr4,
- status="ok").exclude(id=getattr(instance, "id", 0)).exists():
+ if (
+ NetworkIXLan.objects.filter(ipaddr4=ipaddr4, status="ok")
+ .exclude(id=getattr(instance, "id", 0))
+ .exists()
+ ):
raise ValidationError(_("Ipaddress already exists elsewhere"))
return ipaddr4
def clean_ipaddr6(self):
ipaddr6 = self.cleaned_data["ipaddr6"]
instance = self.instance
- if NetworkIXLan.objects.filter(
- ipaddr6=ipaddr6,
- status="ok").exclude(id=getattr(instance, "id", 0)).exists():
+ if (
+ NetworkIXLan.objects.filter(ipaddr6=ipaddr6, status="ok")
+ .exclude(id=getattr(instance, "id", 0))
+ .exists()
+ ):
raise ValidationError(_("Ipaddress already exists elsewhere"))
return ipaddr6
@@ -460,8 +490,7 @@ class UserOrgAffiliationRequestInlineForm(baseForms.ModelForm):
def clean(self):
super(UserOrgAffiliationRequestInlineForm, self).clean()
try:
- rdap_valid = RdapLookup().get_asn(
- self.cleaned_data.get("asn")).emails
+ rdap_valid = RdapLookup().get_asn(self.cleaned_data.get("asn")).emails
except RdapException as exc:
raise ValidationError({"asn": str(exc)})
@@ -470,16 +499,16 @@ class UserOrgAffiliationRequestInline(admin.TabularInline):
model = UserOrgAffiliationRequest
extra = 0
form = UserOrgAffiliationRequestInlineForm
- verbose_name_plural = _(
- "User is looking to be affiliated to these Organizations")
+ verbose_name_plural = _("User is looking to be affiliated to these Organizations")
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "org":
- kwargs["queryset"] = Organization.handleref.filter(
- status="ok").order_by("name")
- return super(
- UserOrgAffiliationRequestInline, self).formfield_for_foreignkey(
- db_field, request, **kwargs)
+ kwargs["queryset"] = Organization.handleref.filter(status="ok").order_by(
+ "name"
+ )
+ return super(UserOrgAffiliationRequestInline, self).formfield_for_foreignkey(
+ db_field, request, **kwargs
+ )
class InternetExchangeAdminForm(StatusForm):
@@ -489,20 +518,30 @@ def __init__(self, *args, **kwargs):
class InternetExchangeAdmin(ModelAdminWithVQCtrl, SoftDeleteAdmin):
- list_display = ('name', 'name_long', 'city', 'country', 'status',
- 'created', 'updated')
- ordering = ('-created', )
- list_filter = (StatusFilter, )
- search_fields = ('name', )
- readonly_fields = ('id', 'nsp_namespace', "ixf_import_history")
+ list_display = (
+ "name",
+ "name_long",
+ "city",
+ "country",
+ "status",
+ "created",
+ "updated",
+ )
+ ordering = ("-created",)
+ list_filter = (StatusFilter,)
+ search_fields = ("name",)
+ readonly_fields = ("id", "nsp_namespace", "ixf_import_history")
inlines = (InternetExchangeFacilityInline, IXLanInline)
form = InternetExchangeAdminForm
def ixf_import_history(self, obj):
return (u'<a href="{}?q={}">{}</a>').format(
urlresolvers.reverse(
- "admin:peeringdb_server_ixlanixfmemberimportlog_changelist"),
- obj.id, _('IXF Import History'))
+ "admin:peeringdb_server_ixlanixfmemberimportlog_changelist"
+ ),
+ obj.id,
+ _("IXF Import History"),
+ )
ixf_import_history.allow_tags = True
@@ -514,10 +553,10 @@ def __init__(self, *args, **kwargs):
class IXLanAdmin(SoftDeleteAdmin):
- list_display = ('ix', 'name', 'descr', 'status')
- search_fields = ('name', )
- list_filter = (StatusFilter, )
- readonly_fields = ('id', )
+ list_display = ("ix", "name", "descr", "status")
+ search_fields = ("name",)
+ list_filter = (StatusFilter,)
+ readonly_fields = ("id",)
inlines = (IXLanPrefixInline, NetworkInternetExchangeInline)
form = IXLanAdminForm
@@ -525,10 +564,27 @@ class IXLanAdmin(SoftDeleteAdmin):
class IXLanIXFMemberImportLogEntryInline(admin.TabularInline):
model = IXLanIXFMemberImportLogEntry
- fields = ("netixlan", "ipv4", "ipv6", "asn", "changes", "rollback_status", "action", "reason")
- readonly_fields = ("netixlan", "ipv4", "ipv6", "asn", "changes",
- "rollback_status", "action", "reason")
- raw_id_fields = ("netixlan", )
+ fields = (
+ "netixlan",
+ "ipv4",
+ "ipv6",
+ "asn",
+ "changes",
+ "rollback_status",
+ "action",
+ "reason",
+ )
+ readonly_fields = (
+ "netixlan",
+ "ipv4",
+ "ipv6",
+ "asn",
+ "changes",
+ "rollback_status",
+ "action",
+ "reason",
+ )
+ raw_id_fields = ("netixlan",)
extra = 0
@@ -559,7 +615,8 @@ def changes(self, obj):
v2 = vb.field_dict.get(k)
if v != v2:
if isinstance(v, ipaddress.IPv4Address) or isinstance(
- v, ipaddress.IPv6Address):
+ v, ipaddress.IPv6Address
+ ):
rv[k] = str(v)
else:
rv[k] = v
@@ -574,13 +631,15 @@ def rollback_status(self, obj):
text = _("CAN BE ROLLED BACK")
color = "#e5f3d6"
elif rs == 1:
- text = (u'{}<br><small>{}</small>').format(
- _("CANNOT BE ROLLED BACK"), _("Has been changed since"))
+ text = (u"{}<br><small>{}</small>").format(
+ _("CANNOT BE ROLLED BACK"), _("Has been changed since")
+ )
color = "#f3ded6"
elif rs == 2:
- text = (u'{}<br><small>{}</small>').format(
+ text = (u"{}<br><small>{}</small>").format(
_("CANNOT BE ROLLED BACK"),
- _("Netixlan with conflicting ipaddress now exists elsewhere"))
+ _("Netixlan with conflicting ipaddress now exists elsewhere"),
+ )
color = "#f3ded6"
elif rs == -1:
text = _("HAS BEEN ROLLED BACK")
@@ -591,10 +650,10 @@ def rollback_status(self, obj):
class IXLanIXFMemberImportLogAdmin(admin.ModelAdmin):
- search_fields = ("ixlan__ix__id", )
+ search_fields = ("ixlan__ix__id",)
list_display = ("id", "ix", "ixlan_name", "source", "created", "changes")
readonly_fields = ("ix", "ixlan_name", "source", "changes")
- inlines = (IXLanIXFMemberImportLogEntryInline, )
+ inlines = (IXLanIXFMemberImportLogEntryInline,)
actions = [rollback]
def has_delete_permission(self, request, obj=None):
@@ -607,38 +666,47 @@ def ix(self, obj):
return '<a href="{}">{}</a> (ID: {})'.format(
urlresolvers.reverse(
"admin:peeringdb_server_internetexchange_change",
- args=(obj.ixlan.ix.id, )), obj.ixlan.ix.name, obj.ixlan.ix.id)
+ args=(obj.ixlan.ix.id,),
+ ),
+ obj.ixlan.ix.name,
+ obj.ixlan.ix.id,
+ )
ix.allow_tags = True
def ixlan_name(self, obj):
return '<a href="{}">{}</a> (ID: {})'.format(
- urlresolvers.reverse("admin:peeringdb_server_ixlan_change",
- args=(obj.ixlan.id, )), obj.ixlan.name or "",
- obj.ixlan.id)
+ urlresolvers.reverse(
+ "admin:peeringdb_server_ixlan_change", args=(obj.ixlan.id,)
+ ),
+ obj.ixlan.name or "",
+ obj.ixlan.id,
+ )
ixlan_name.allow_tags = True
def source(self, obj):
return obj.ixlan.ixf_ixp_member_list_url
+
class SponsorshipOrganizationInline(admin.TabularInline):
model = SponsorshipOrganization
extra = 1
- raw_id_fields = ('org',)
+ raw_id_fields = ("org",)
autocomplete_lookup_fields = {
- 'fk': ['org'],
+ "fk": ["org"],
}
+
class SponsorshipAdmin(admin.ModelAdmin):
- list_display = ('organizations', 'start_date', 'end_date', 'level', 'status')
- readonly_fields = ('organizations', 'status', 'notify_date')
+ list_display = ("organizations", "start_date", "end_date", "level", "status")
+ readonly_fields = ("organizations", "status", "notify_date")
inlines = (SponsorshipOrganizationInline,)
- raw_id_fields = ('orgs',)
+ raw_id_fields = ("orgs",)
autocomplete_lookup_fields = {
- 'm2m': ['orgs'],
+ "m2m": ["orgs"],
}
def status(self, obj):
@@ -658,7 +726,6 @@ def status(self, obj):
status.allow_tags = True
-
def organizations(self, obj):
qset = obj.orgs.all().order_by("name")
if not qset.count():
@@ -667,6 +734,7 @@ def organizations(self, obj):
organizations.allow_tags = True
+
class PartnershipAdminForm(baseForms.ModelForm):
def __init__(self, *args, **kwargs):
super(PartnershipAdminForm, self).__init__(*args, **kwargs)
@@ -674,8 +742,8 @@ def __init__(self, *args, **kwargs):
class PartnershipAdmin(admin.ModelAdmin):
- list_display = ('org_name', 'level', 'status')
- readonly_fields = ('status', 'org_name')
+ list_display = ("org_name", "level", "status")
+ readonly_fields = ("status", "org_name")
form = PartnershipAdminForm
def org_name(self, obj):
@@ -695,18 +763,18 @@ def status(self, obj):
class OrganizationAdmin(ModelAdminWithVQCtrl, SoftDeleteAdmin):
- list_display = ('handle', 'name', 'status', 'created', 'updated')
- ordering = ('-created', )
- search_fields = ('name', )
- list_filter = (StatusFilter, )
- readonly_fields = ('id', 'nsp_namespace')
+ list_display = ("handle", "name", "status", "created", "updated")
+ ordering = ("-created",)
+ search_fields = ("name",)
+ list_filter = (StatusFilter,)
+ readonly_fields = ("id", "nsp_namespace")
form = StatusForm
def get_urls(self):
urls = super(OrganizationAdmin, self).get_urls()
my_urls = [
- url(r'^org-merge-tool/merge$', self.org_merge_tool_merge_action),
- url(r'^org-merge-tool/$', self.org_merge_tool_view),
+ url(r"^org-merge-tool/merge$", self.org_merge_tool_merge_action),
+ url(r"^org-merge-tool/$", self.org_merge_tool_view),
]
return my_urls + urls
@@ -715,19 +783,16 @@ def org_merge_tool_merge_action(self, request):
return HttpResponseForbidden(request)
try:
- orgs = Organization.objects.filter(
- id__in=request.GET.get("ids").split(","))
+ orgs = Organization.objects.filter(id__in=request.GET.get("ids").split(","))
except ValueError:
- return JsonResponse({
- "error": _("Malformed organization ids")
- }, status=400)
+ return JsonResponse({"error": _("Malformed organization ids")}, status=400)
try:
org = Organization.objects.get(id=request.GET.get("id"))
except Organization.DoesNotExist:
- return JsonResponse({
- "error": _("Merge target organization does not exist")
- }, status=400)
+ return JsonResponse(
+ {"error": _("Merge target organization does not exist")}, status=400
+ )
rv = merge_organizations(orgs, org, request)
@@ -739,8 +804,10 @@ def org_merge_tool_view(self, request):
context = dict(
self.admin_site.each_context(request),
undo_url=urlresolvers.reverse(
- "admin:peeringdb_server_organizationmerge_changelist"),
- title=_("Organization Merging Tool"))
+ "admin:peeringdb_server_organizationmerge_changelist"
+ ),
+ title=_("Organization Merging Tool"),
+ )
return TemplateResponse(request, "admin/org_merge_tool.html", context)
@@ -759,15 +826,15 @@ def has_delete_permission(self, request, obj=None):
class OrganizationMergeLog(ModelAdminWithUrlActions):
- list_display = ('id', 'from_org', 'to_org', 'created')
- search_fields = ('from_org__name', 'to_org__name')
- readonly_fields = ('from_org', 'to_org', 'undo_merge')
- inlines = (OrganizationMergeEntities, )
+ list_display = ("id", "from_org", "to_org", "created")
+ search_fields = ("from_org__name", "to_org__name")
+ readonly_fields = ("from_org", "to_org", "undo_merge")
+ inlines = (OrganizationMergeEntities,)
def undo_merge(self, obj):
return (
u'{}'
- ).format(_('Undo merge'))
+ ).format(_("Undo merge"))
undo_merge.allow_tags = True
@@ -793,12 +860,11 @@ def __init__(self, *args, **kwargs):
class FacilityAdmin(ModelAdminWithVQCtrl, SoftDeleteAdmin):
- list_display = ('name', 'org', 'city', 'country', 'status', 'created',
- 'updated')
- ordering = ('-created', )
- list_filter = (StatusFilter, )
- search_fields = ('name', )
- readonly_fields = ('id', 'nsp_namespace')
+ list_display = ("name", "org", "city", "country", "status", "created", "updated")
+ ordering = ("-created",)
+ list_filter = (StatusFilter,)
+ search_fields = ("name",)
+ readonly_fields = ("id", "nsp_namespace")
form = FacilityAdminForm
inlines = (
InternetExchangeFacilityInline,
@@ -808,9 +874,9 @@ class FacilityAdmin(ModelAdminWithVQCtrl, SoftDeleteAdmin):
class NetworkAdminForm(StatusForm):
- #set initial values on info_prefixes4 and 6 to 0
- #this streamlines the process of adding a network through
- #the django admin controlpanel (#289)
+ # set initial values on info_prefixes4 and 6 to 0
+ # this streamlines the process of adding a network through
+ # the django admin controlpanel (#289)
info_prefixes4 = baseForms.IntegerField(required=False, initial=0)
info_prefixes6 = baseForms.IntegerField(required=False, initial=0)
@@ -820,11 +886,11 @@ def __init__(self, *args, **kwargs):
class NetworkAdmin(ModelAdminWithVQCtrl, SoftDeleteAdmin):
- list_display = ('name', 'asn', 'aka', 'status', 'created', 'updated')
- ordering = ('-created', )
- list_filter = (StatusFilter, )
- search_fields = ('name', 'asn')
- readonly_fields = ('id', 'nsp_namespace')
+ list_display = ("name", "asn", "aka", "status", "created", "updated")
+ ordering = ("-created",)
+ list_filter = (StatusFilter,)
+ search_fields = ("name", "asn")
+ readonly_fields = ("id", "nsp_namespace")
form = NetworkAdminForm
inlines = (
@@ -833,18 +899,19 @@ class NetworkAdmin(ModelAdminWithVQCtrl, SoftDeleteAdmin):
NetworkInternetExchangeInline,
)
+
class InternetExchangeFacilityAdmin(SoftDeleteAdmin):
- list_display = ('id', 'ix', 'facility', 'status', 'created', 'updated')
- search_fields = ('ix__name', 'facility__name')
- readonly_fields = ('id',)
+ list_display = ("id", "ix", "facility", "status", "created", "updated")
+ search_fields = ("ix__name", "facility__name")
+ readonly_fields = ("id",)
list_filter = (StatusFilter,)
form = StatusForm
class IXLanPrefixAdmin(SoftDeleteAdmin):
- list_display = ('id', 'prefix', 'ixlan', 'ix', 'status', 'created', 'updated')
- readonly_fields = ('ix','id')
- search_fields = ('ixlan__name', 'ixlan__ix__name', 'prefix')
+ list_display = ("id", "prefix", "ixlan", "ix", "status", "created", "updated")
+ readonly_fields = ("ix", "id")
+ search_fields = ("ixlan__name", "ixlan__ix__name", "prefix")
list_filter = (StatusFilter,)
form = StatusForm
@@ -853,9 +920,28 @@ def ix(self, obj):
class NetworkIXLanAdmin(SoftDeleteAdmin):
- list_display = ('id', 'asn', 'net', 'ixlan', 'ix', 'ipaddr4', 'ipaddr6', 'status', 'created', 'updated')
- search_fields = ('asn', 'network__asn', 'network__name', 'ixlan__name', 'ixlan__ix__name', 'ipaddr4', 'ipaddr6')
- readonly_fields = ('id', 'ix','net')
+ list_display = (
+ "id",
+ "asn",
+ "net",
+ "ixlan",
+ "ix",
+ "ipaddr4",
+ "ipaddr6",
+ "status",
+ "created",
+ "updated",
+ )
+ search_fields = (
+ "asn",
+ "network__asn",
+ "network__name",
+ "ixlan__name",
+ "ixlan__ix__name",
+ "ipaddr4",
+ "ipaddr6",
+ )
+ readonly_fields = ("id", "ix", "net")
list_filter = (StatusFilter,)
form = StatusForm
@@ -867,9 +953,19 @@ def net(self, obj):
class NetworkContactAdmin(SoftDeleteAdmin):
- list_display = ('id', 'net', 'role', 'name', 'phone', 'email', 'status', 'created', 'updated')
- search_fields = ('network__asn', 'network__name')
- readonly_fields = ('id', 'net')
+ list_display = (
+ "id",
+ "net",
+ "role",
+ "name",
+ "phone",
+ "email",
+ "status",
+ "created",
+ "updated",
+ )
+ search_fields = ("network__asn", "network__name")
+ readonly_fields = ("id", "net")
list_filter = (StatusFilter,)
form = StatusForm
@@ -878,9 +974,9 @@ def net(self, obj):
class NetworkFacilityAdmin(SoftDeleteAdmin):
- list_display = ('id', 'net', 'facility', 'status', 'created', 'updated')
- search_fields = ('network__asn', 'network__name', 'facility__name')
- readonly_fields = ('id', 'net')
+ list_display = ("id", "net", "facility", "status", "created", "updated")
+ search_fields = ("network__asn", "network__name", "facility__name")
+ readonly_fields = ("id", "net")
list_filter = (StatusFilter,)
form = StatusForm
@@ -888,18 +984,15 @@ def net(self, obj):
return u"{} (AS{})".format(obj.network.name, obj.network.asn)
-
-
-
class VerificationQueueAdmin(ModelAdminWithUrlActions):
- list_display = ('content_type', 'item', 'created', 'view', 'extra')
- filter_fields = ('content_type', )
- readonly_fields = ('created', 'view', 'extra')
- search_fields = ('item', )
+ list_display = ("content_type", "item", "created", "view", "extra")
+ filter_fields = ("content_type",)
+ readonly_fields = ("created", "view", "extra")
+ search_fields = ("item",)
def get_search_results(self, request, queryset, search_term):
- #queryset, use_distinct = super(VerificationQueueAdmin, self).get_search_results(request, queryset, search_term)
- if not search_term or search_term == '':
+ # queryset, use_distinct = super(VerificationQueueAdmin, self).get_search_results(request, queryset, search_term)
+ if not search_term or search_term == "":
return queryset, False
use_distinct = True
@@ -912,20 +1005,22 @@ def get_search_results(self, request, queryset, search_term):
content_type = ContentType.objects.get_for_model(model)
for instance in list(qrs):
vq = VerificationQueueItem.objects.filter(
- content_type=content_type, object_id=instance.id)
- myset |= (queryset & vq)
+ content_type=content_type, object_id=instance.id
+ )
+ myset |= queryset & vq
return myset, use_distinct
def make_redirect(self, obj, action):
if action == "vq_approve":
opts = type(obj.first().item)._meta
return redirect(
- urlresolvers.reverse("admin:%s_%s_change" % (opts.app_label,
- opts.model_name),
- args=(obj.first().item.id, )))
+ urlresolvers.reverse(
+ "admin:%s_%s_change" % (opts.app_label, opts.model_name),
+ args=(obj.first().item.id,),
+ )
+ )
opts = obj.model._meta
- return redirect("admin:%s_%s_changelist" % (opts.app_label,
- opts.model_name))
+ return redirect("admin:%s_%s_changelist" % (opts.app_label, opts.model_name))
def vq_approve(self, request, queryset):
with reversion.create_revision():
@@ -944,29 +1039,28 @@ def vq_deny(modeladmin, request, queryset):
actions = [vq_approve, vq_deny]
def view(self, obj):
- return (u'<a href="{}">{}</a>').format(obj.item_admin_url, _('View'))
+ return (u'<a href="{}">{}</a>').format(obj.item_admin_url, _("View"))
view.allow_tags = True
def extra(self, obj):
- if hasattr(obj.item,
- "org") and obj.item.org.id == settings.SUGGEST_ENTITY_ORG:
+ if hasattr(obj.item, "org") and obj.item.org.id == settings.SUGGEST_ENTITY_ORG:
return "Suggestion"
return ""
class UserOrgAffiliationRequestAdmin(ModelAdminWithUrlActions):
- list_display = ('user', 'asn', 'org', 'created', 'status')
- search_fields = ('user', 'asn')
- readonly_fields = ('created', )
+ list_display = ("user", "asn", "org", "created", "status")
+ search_fields = ("user", "asn")
+ readonly_fields = ("created",)
def approve_and_notify(self, request, queryset):
for each in queryset:
each.approve()
each.notify_ownership_approved()
self.message_user(
- request,
- _("Affiliation request was approved and the user was notified."))
+ request, _("Affiliation request was approved and the user was notified.")
+ )
approve_and_notify.short_description = _("Approve and notify User")
@@ -1002,41 +1096,61 @@ def clean_username(self):
class Meta(forms.UserCreationForm.Meta):
model = User
- fields = ('username', 'password', 'email')
+ fields = ("username", "password", "email")
class UserAdmin(ModelAdminWithVQCtrl, UserAdmin):
- inlines = (UserOrgAffiliationRequestInline, )
- readonly_fields = ('email_status', 'organizations', 'view_permissions',
- 'change_password')
- list_display = ('username', 'email', 'first_name', 'last_name',
- 'email_status', 'status')
+ inlines = (UserOrgAffiliationRequestInline,)
+ readonly_fields = (
+ "email_status",
+ "organizations",
+ "view_permissions",
+ "change_password",
+ )
+ list_display = (
+ "username",
+ "email",
+ "first_name",
+ "last_name",
+ "email_status",
+ "status",
+ )
add_form = UserCreationForm
- add_fieldsets = ((None, {
- 'classes': ('wide', ),
- 'fields': ('username', 'password1', 'password2', 'email')
- }), )
- fieldsets = ((None, {
- 'classes': ('wide', ),
- 'fields': ('email_status', 'change_password')
- }), ) + UserAdmin.fieldsets + ((None, {
- 'classes': ('wide', ),
- 'fields': ('organizations', )
- }), )
+ add_fieldsets = (
+ (
+ None,
+ {
+ "classes": ("wide",),
+ "fields": ("username", "password1", "password2", "email"),
+ },
+ ),
+ )
+ fieldsets = (
+ ((None, {"classes": ("wide",), "fields": ("email_status", "change_password")}),)
+ + UserAdmin.fieldsets
+ + ((None, {"classes": ("wide",), "fields": ("organizations",)}),)
+ )
# we want to get rid of user permissions and group editor as that
# will be displayed on a separate page, for performance reasons
for name, grp in fieldsets:
- grp["fields"] = tuple([
- fld for fld in grp["fields"]
- if fld not in [
- "groups", "user_permissions", "is_staff", "is_active",
- "is_superuser"
+ grp["fields"] = tuple(
+ [
+ fld
+ for fld in grp["fields"]
+ if fld
+ not in [
+ "groups",
+ "user_permissions",
+ "is_staff",
+ "is_active",
+ "is_superuser",
+ ]
]
- ])
+ )
if name == "Permissions":
- grp["fields"] += ('view_permissions', )
+ grp["fields"] += ("view_permissions",)
def version(self, obj):
"""
@@ -1048,36 +1162,37 @@ def version(self, obj):
def change_password(self, obj):
return (u'<a href="{}">{}</a>').format(
- urlresolvers.reverse("admin:auth_user_password_change",
- args=(obj.id, )), _('Change Password'))
+ urlresolvers.reverse("admin:auth_user_password_change", args=(obj.id,)),
+ _("Change Password"),
+ )
change_password.allow_tags = True
def view_permissions(self, obj):
url = urlresolvers.reverse(
- "admin:%s_%s_change" % (UserPermission._meta.app_label,
- UserPermission._meta.model_name),
- args=(obj.id, ))
+ "admin:%s_%s_change"
+ % (UserPermission._meta.app_label, UserPermission._meta.model_name),
+ args=(obj.id,),
+ )
- return (u'<a href="{}">{}</a>').format(url, _('Edit Permissions'))
+ return (u'<a href="{}">{}</a>').format(url, _("Edit Permissions"))
view_permissions.allow_tags = True
def email_status(self, obj):
if obj.email_confirmed:
- return (u'{}').format(
- _("VERIFIED"))
+ return (u'{}').format(_("VERIFIED"))
else:
- return (u'{}').format(
- _("UNVERIFIED"))
+ return (u'{}').format(_("UNVERIFIED"))
email_status.allow_tags = True
def organizations(self, obj):
- return loader.get_template('admin/user-organizations.html').render({
- 'organizations': obj.organizations,
- 'user': obj
- }).replace("\n", "")
+ return (
+ loader.get_template("admin/user-organizations.html")
+ .render({"organizations": obj.organizations, "user": obj})
+ .replace("\n", "")
+ )
organizations.allow_tags = True
@@ -1091,31 +1206,45 @@ class Meta:
class UserPermissionAdmin(UserAdmin):
- search_fields = ('username', )
+ search_fields = ("username",)
- inlines = (UserOrgAffiliationRequestInline, UserPermissionInline,
- UserPermissionInlineAdd)
+ inlines = (
+ UserOrgAffiliationRequestInline,
+ UserPermissionInline,
+ UserPermissionInlineAdd,
+ )
- fieldsets = ((None, {
- 'fields': ('user', 'is_active', 'is_staff', 'is_superuser', 'groups',
- 'user_permissions'),
- 'classes': ('wide', )
- }), )
+ fieldsets = (
+ (
+ None,
+ {
+ "fields": (
+ "user",
+ "is_active",
+ "is_staff",
+ "is_superuser",
+ "groups",
+ "user_permissions",
+ ),
+ "classes": ("wide",),
+ },
+ ),
+ )
- readonly_fields = ('user', )
+ readonly_fields = ("user",)
def get_form(self, request, obj=None, **kwargs):
# we want to remove the password field from the form
# since we dont send it and dont want to run clean for it
- form = super(UserPermissionAdmin, self).get_form(
- request, obj, **kwargs)
- del form.base_fields['password']
+ form = super(UserPermissionAdmin, self).get_form(request, obj, **kwargs)
+ del form.base_fields["password"]
return form
def user(self, obj):
url = urlresolvers.reverse(
- "admin:%s_%s_change" % (User._meta.app_label,
- User._meta.model_name), args=(obj.id, ))
+ "admin:%s_%s_change" % (User._meta.app_label, User._meta.model_name),
+ args=(obj.id,),
+ )
        return '<a href="%s">%s</a>' % (url, obj.username)
@@ -1204,7 +1333,7 @@ def sorters(item):
netixlan.ip_dupe = 4
netixlan.exchange = int(netixlan.ixlan.ix_id)
netixlan.ip_sorter = int(netixlan.ipaddr4)
- netixlan.dt_sorter = int(netixlan.updated.strftime('%s'))
+ netixlan.dt_sorter = int(netixlan.updated.strftime("%s"))
collisions.append(netixlan)
for ip, netixlans in ip6s.items():
@@ -1213,7 +1342,7 @@ def sorters(item):
netixlan.ip_dupe = 6
netixlan.exchange = int(netixlan.ixlan.ix_id)
netixlan.ip_sorter = int(netixlan.ipaddr6)
- netixlan.dt_sorter = int(netixlan.updated.strftime('%s'))
+ netixlan.dt_sorter = int(netixlan.updated.strftime("%s"))
collisions.append(netixlan)
if sort_keys != ["pk"] and sort_keys != ["-pk"]:
@@ -1233,17 +1362,27 @@ class DuplicateIPAdmin(SoftDeleteAdmin):
Can be removed after #92 is fully completed
"""
- list_display = ('id_ro', 'ip', 'asn', 'ix', 'net', 'updated_ro',
- 'status_ro')
- readonly_fields = ('id_ro', 'ip', 'net', 'ix', 'asn', 'updated_ro',
- 'status_ro')
+ list_display = ("id_ro", "ip", "asn", "ix", "net", "updated_ro", "status_ro")
+ readonly_fields = ("id_ro", "ip", "net", "ix", "asn", "updated_ro", "status_ro")
form = NetworkIXLanForm
list_per_page = 1000
- fieldsets = ((None, {
- 'classes': ('wide', ),
- 'fields': ('status', 'asn', 'ipaddr4', 'ipaddr6', 'ix', 'net',
- 'updated')
- }), )
+ fieldsets = (
+ (
+ None,
+ {
+ "classes": ("wide",),
+ "fields": (
+ "status",
+ "asn",
+ "ipaddr4",
+ "ipaddr6",
+ "ix",
+ "net",
+ "updated",
+ ),
+ },
+ ),
+ )
def get_changelist(self, request):
return DuplicateIPChangeList
@@ -1287,9 +1426,10 @@ def ix(self, obj):
ix.admin_order_field = "exchange"
def changelist_view(self, request, extra_context=None):
- extra_context = {'title': 'Duplicate IPs'}
+ extra_context = {"title": "Duplicate IPs"}
return super(DuplicateIPAdmin, self).changelist_view(
- request, extra_context=extra_context)
+ request, extra_context=extra_context
+ )
def has_add_permission(self, request):
return False
@@ -1303,6 +1443,7 @@ class CommandLineToolPrepareForm(baseForms.Form):
Form that allows user to select which commandline tool
to run
"""
+
tool = baseForms.ChoiceField(choices=COMMANDLINE_TOOLS)
@@ -1310,9 +1451,17 @@ class CommandLineToolAdmin(admin.ModelAdmin):
"""
View that lets staff users run peeringdb command line tools
"""
+
list_display = ("tool", "description", "user", "created", "status")
- readonly_fields = ("tool", "description", "arguments", "result", "user",
- "created", "status")
+ readonly_fields = (
+ "tool",
+ "description",
+ "arguments",
+ "result",
+ "user",
+ "created",
+ "status",
+ )
def has_delete_permission(self, request, obj=None):
return False
@@ -1320,12 +1469,21 @@ def has_delete_permission(self, request, obj=None):
def get_urls(self):
urls = super(CommandLineToolAdmin, self).get_urls()
my_urls = [
- url(r'^prepare/$', self.prepare_command_view,
- name="peeringdb_server_commandlinetool_prepare"),
- url(r'^preview/$', self.preview_command_view,
- name="peeringdb_server_commandlinetool_preview"),
- url(r'^run/$', self.run_command_view,
- name="peeringdb_server_commandlinetool_run"),
+ url(
+ r"^prepare/$",
+ self.prepare_command_view,
+ name="peeringdb_server_commandlinetool_prepare",
+ ),
+ url(
+ r"^preview/$",
+ self.preview_command_view,
+ name="peeringdb_server_commandlinetool_preview",
+ ),
+ url(
+ r"^run/$",
+ self.run_command_view,
+ name="peeringdb_server_commandlinetool_run",
+ ),
]
return my_urls + urls
@@ -1352,20 +1510,24 @@ def prepare_command_view(self, request):
else:
form = CommandLineToolPrepareForm()
- context.update({
- "adminform": helpers.AdminForm(
- form, list([(None, {
- 'fields': form.base_fields
- })]), self.get_prepopulated_fields(request)),
- "action": action,
- "app_label": self.model._meta.app_label,
- "opts": self.model._meta,
- "title": title
- })
+ context.update(
+ {
+ "adminform": helpers.AdminForm(
+ form,
+ list([(None, {"fields": form.base_fields})]),
+ self.get_prepopulated_fields(request),
+ ),
+ "action": action,
+ "app_label": self.model._meta.app_label,
+ "opts": self.model._meta,
+ "title": title,
+ }
+ )
return TemplateResponse(
request,
"admin/peeringdb_server/commandlinetool/prepare_command.html",
- context)
+ context,
+ )
def preview_command_view(self, request):
"""
@@ -1387,20 +1549,24 @@ def preview_command_view(self, request):
else:
raise Exception(_("Only POST requests allowed."))
- context.update({
- "adminform": helpers.AdminForm(
- form, list([(None, {
- 'fields': form.base_fields
- })]), self.get_prepopulated_fields(request)),
- "action": action,
- "app_label": self.model._meta.app_label,
- "opts": self.model._meta,
- "title": _("{} (Preview)").format(tool.name)
- })
+ context.update(
+ {
+ "adminform": helpers.AdminForm(
+ form,
+ list([(None, {"fields": form.base_fields})]),
+ self.get_prepopulated_fields(request),
+ ),
+ "action": action,
+ "app_label": self.model._meta.app_label,
+ "opts": self.model._meta,
+ "title": _("{} (Preview)").format(tool.name),
+ }
+ )
return TemplateResponse(
request,
"admin/peeringdb_server/commandlinetool/preview_command.html",
- context)
+ context,
+ )
def run_command_view(self, request):
"""
@@ -1419,24 +1585,27 @@ def run_command_view(self, request):
else:
raise Exception(_("Only POST requests allowed."))
- context.update({
- "adminform": helpers.AdminForm(
- form, list([(None, {
- 'fields': form.base_fields
- })]), self.get_prepopulated_fields(request)),
- "action": "run",
- "app_label": self.model._meta.app_label,
- "opts": self.model._meta,
- "title": tool.name
- })
+ context.update(
+ {
+ "adminform": helpers.AdminForm(
+ form,
+ list([(None, {"fields": form.base_fields})]),
+ self.get_prepopulated_fields(request),
+ ),
+ "action": "run",
+ "app_label": self.model._meta.app_label,
+ "opts": self.model._meta,
+ "title": tool.name,
+ }
+ )
return TemplateResponse(
- request, "admin/peeringdb_server/commandlinetool/run_command.html",
- context)
+ request, "admin/peeringdb_server/commandlinetool/run_command.html", context
+ )
class DeskProTicketAdmin(admin.ModelAdmin):
list_display = ("id", "subject", "user", "created", "published")
- readonly_fields = ("user", )
+ readonly_fields = ("user",)
admin.site.register(Facility, FacilityAdmin)
diff --git a/peeringdb_server/admin_commandline_tools.py b/peeringdb_server/admin_commandline_tools.py
index 1c83487e..edd81cc7 100644
--- a/peeringdb_server/admin_commandline_tools.py
+++ b/peeringdb_server/admin_commandline_tools.py
@@ -7,11 +7,18 @@
from dal import autocomplete
from django import forms
from django.core.management import call_command
-from peeringdb_server.models import (REFTAG_MAP, COMMANDLINE_TOOLS, CommandLineTool,
- InternetExchange, Facility, IXLan)
+from peeringdb_server.models import (
+ REFTAG_MAP,
+ COMMANDLINE_TOOLS,
+ CommandLineTool,
+ InternetExchange,
+ Facility,
+ IXLan,
+)
from peeringdb_server import maintenance
+
def _(m):
return m
@@ -114,8 +121,9 @@ def _run(self, user, commit=False):
try:
self.validate()
if commit:
- call_command(self.tool, *self.args, commit=True, stdout=r,
- **self.kwargs)
+ call_command(
+ self.tool, *self.args, commit=True, stdout=r, **self.kwargs
+ )
else:
call_command(self.tool, *self.args, stdout=r, **self.kwargs)
self.result = r.getvalue()
@@ -127,33 +135,48 @@ def _run(self, user, commit=False):
maintenance.off()
if commit:
- CommandLineTool.objects.create(user=user, tool=self.tool,
- description=self.description,
- status="done",
- arguments=json.dumps({
- "args": self.args,
- "kwargs": self.kwargs
- }), result=self.result)
+ CommandLineTool.objects.create(
+ user=user,
+ tool=self.tool,
+ description=self.description,
+ status="done",
+ arguments=json.dumps({"args": self.args, "kwargs": self.kwargs}),
+ result=self.result,
+ )
return self.result
def run(self, user, commit=False):
if self.queue and commit:
- if CommandLineTool.objects.filter(tool=self.tool).exclude(status="done").count() >= self.queue:
- self.result = "[error] {}".format(_("This command is already waiting / running - please wait for it to finish before executing it again"))
+ if (
+ CommandLineTool.objects.filter(tool=self.tool)
+ .exclude(status="done")
+ .count()
+ >= self.queue
+ ):
+ self.result = "[error] {}".format(
+ _(
+ "This command is already waiting / running - please wait for it to finish before executing it again"
+ )
+ )
return self.result
- CommandLineTool.objects.create(user=user, tool=self.tool,
- description=self.description,
- status="waiting",
- arguments=json.dumps({
- "args": self.args,
- "kwargs": self.kwargs
- }), result="")
-
- self.result = "[warn] {}".format(_("This command takes a while to complete and will be queued and ran in the "\
- "background. No output log can be provided at this point in time. You may "\
- "review once the command has finished."))
+ CommandLineTool.objects.create(
+ user=user,
+ tool=self.tool,
+ description=self.description,
+ status="waiting",
+ arguments=json.dumps({"args": self.args, "kwargs": self.kwargs}),
+ result="",
+ )
+
+ self.result = "[warn] {}".format(
+ _(
+ "This command takes a while to complete and will be queued and ran in the "
+ "background. No output log can be provided at this point in time. You may "
+ "review once the command has finished."
+ )
+ )
return self.result
else:
with reversion.create_revision():
@@ -175,19 +198,28 @@ class ToolRenumberLans(CommandLineToolWrapper):
class Form(forms.Form):
exchange = forms.ModelChoiceField(
queryset=InternetExchange.handleref.undeleted().order_by("name"),
- widget=autocomplete.ModelSelect2(url="/autocomplete/ix/json"))
+ widget=autocomplete.ModelSelect2(url="/autocomplete/ix/json"),
+ )
old_prefix = forms.CharField(
- help_text=_("Old prefix - renumber all netixlans that fall into this prefix"))
+ help_text=_(
+ "Old prefix - renumber all netixlans that fall into this prefix"
+ )
+ )
new_prefix = forms.CharField(
- help_text=_("New prefix - needs to be the same protocol and length as old prefix"))
+ help_text=_(
+ "New prefix - needs to be the same protocol and length as old prefix"
+ )
+ )
@property
def description(self):
""" Provide a human readable description of the command that was run """
try:
return "{}: {} to {}".format(
- InternetExchange.objects.get(id=self.args[0]), self.args[1],
- self.args[2])
+ InternetExchange.objects.get(id=self.args[0]),
+ self.args[1],
+ self.args[2],
+ )
except:
# if a version of this command was run before, we still need to able
# to display a somewhat useful discription, so fall back to this basic
@@ -195,7 +227,11 @@ def description(self):
return "(Legacy) {}".format(self.args)
def set_arguments(self, form_data):
- self.args = [form_data.get("exchange", EmptyId()).id, form_data.get("old_prefix"), form_data.get("new_prefix")]
+ self.args = [
+ form_data.get("exchange", EmptyId()).id,
+ form_data.get("old_prefix"),
+ form_data.get("new_prefix"),
+ ]
@register_tool
@@ -211,24 +247,27 @@ class Form(forms.Form):
other = forms.ModelChoiceField(
queryset=Facility.handleref.undeleted().order_by("name"),
widget=autocomplete.ModelSelect2(url="/autocomplete/fac/json"),
- help_text=_("Merge this facility - it will be deleted"))
+ help_text=_("Merge this facility - it will be deleted"),
+ )
target = forms.ModelChoiceField(
queryset=Facility.handleref.undeleted().order_by("name"),
widget=autocomplete.ModelSelect2(url="/autocomplete/fac/json"),
- help_text=_("Target facility"))
+ help_text=_("Target facility"),
+ )
@property
def description(self):
""" Provide a human readable description of the command that was run """
return "{} into {}".format(
Facility.objects.get(id=self.kwargs["ids"]),
- Facility.objects.get(id=self.kwargs["target"]))
+ Facility.objects.get(id=self.kwargs["target"]),
+ )
def set_arguments(self, form_data):
self.kwargs = {
"ids": str(form_data.get("other", EmptyId()).id),
- "target": str(form_data.get("target", EmptyId()).id)
+ "target": str(form_data.get("target", EmptyId()).id),
}
@@ -243,11 +282,14 @@ class ToolMergeFacilitiesUndo(CommandLineToolWrapper):
class Form(forms.Form):
merge = forms.ModelChoiceField(
- queryset=CommandLineTool.objects.filter(
- tool="pdb_fac_merge").order_by("-created"),
+ queryset=CommandLineTool.objects.filter(tool="pdb_fac_merge").order_by(
+ "-created"
+ ),
widget=autocomplete.ModelSelect2(
- url="/autocomplete/admin/clt-history/pdb_fac_merge/"),
- help_text=_("Undo this merge"))
+ url="/autocomplete/admin/clt-history/pdb_fac_merge/"
+ ),
+ help_text=_("Undo this merge"),
+ )
@property
def description(self):
@@ -256,15 +298,17 @@ def description(self):
# in order to make a useful description we need to collect the arguments
# from the merge command that was undone
kwargs = json.loads(
- CommandLineTool.objects.get(
- id=self.kwargs["clt"]).arguments).get("kwargs")
+ CommandLineTool.objects.get(id=self.kwargs["clt"]).arguments
+ ).get("kwargs")
return "Undo: {} into {}".format(
Facility.objects.get(id=kwargs["ids"]),
- Facility.objects.get(id=kwargs["target"]))
+ Facility.objects.get(id=kwargs["target"]),
+ )
def set_arguments(self, form_data):
self.kwargs = {"clt": form_data.get("merge", EmptyId()).id}
+
@register_tool
class ToolReset(CommandLineToolWrapper):
tool = "pdb_wipe"
@@ -272,10 +316,18 @@ class ToolReset(CommandLineToolWrapper):
maintenance = True
class Form(forms.Form):
- keep_users = forms.BooleanField(required=False,
- help_text=_("Don't delete users. Note that superuser accounts are always kept - regardless of this setting."))
- load_data = forms.BooleanField(required=False, initial=True, help_text=_("Load data from peeringdb API"))
- load_data_url = forms.CharField(required=False, initial="https://www.peeringdb.com/api")
+ keep_users = forms.BooleanField(
+ required=False,
+ help_text=_(
+ "Don't delete users. Note that superuser accounts are always kept - regardless of this setting."
+ ),
+ )
+ load_data = forms.BooleanField(
+ required=False, initial=True, help_text=_("Load data from peeringdb API")
+ )
+ load_data_url = forms.CharField(
+ required=False, initial="https://www.peeringdb.com/api"
+ )
@property
def description(self):
@@ -290,18 +342,19 @@ class ToolUndelete(CommandLineToolWrapper):
"""
Allows restoration of an object object and it's child objects
"""
+
tool = "pdb_undelete"
# These are the reftags that are currently supported by this
# tool.
- supported_reftags = ["ixlan","fac"]
+ supported_reftags = ["ixlan", "fac"]
class Form(forms.Form):
version = forms.ModelChoiceField(
queryset=Version.objects.all().order_by("-revision_id"),
- widget=autocomplete.ModelSelect2(
- url="/autocomplete/admin/deletedversions"),
- help_text=_("Restore this object - search by [reftag] [id]"))
+ widget=autocomplete.ModelSelect2(url="/autocomplete/admin/deletedversions"),
+ help_text=_("Restore this object - search by [reftag] [id]"),
+ )
@property
def description(self):
@@ -312,13 +365,23 @@ def set_arguments(self, form_data):
if not version:
return
reftag = version.content_type.model_class().HandleRef.tag
- self.kwargs = {"reftag":reftag, "id":version.object_id, "version_id":version.id}
+ self.kwargs = {
+ "reftag": reftag,
+ "id": version.object_id,
+ "version_id": version.id,
+ }
def validate(self):
if self.kwargs.get("reftag") not in self.supported_reftags:
- raise ValueError(_("Only {} type objects may be restored " \
- "through this interface at this point").format(",".join(self.supported_reftags)))
-
- obj = REFTAG_MAP[self.kwargs.get("reftag")].objects.get(id=self.kwargs.get("id"))
+ raise ValueError(
+ _(
+ "Only {} type objects may be restored "
+ "through this interface at this point"
+ ).format(",".join(self.supported_reftags))
+ )
+
+ obj = REFTAG_MAP[self.kwargs.get("reftag")].objects.get(
+ id=self.kwargs.get("id")
+ )
if obj.status != "deleted":
raise ValueError("{} is not currently marked as deleted".format(obj))
diff --git a/peeringdb_server/api_cache.py b/peeringdb_server/api_cache.py
index 8714d313..d67f203f 100644
--- a/peeringdb_server/api_cache.py
+++ b/peeringdb_server/api_cache.py
@@ -4,7 +4,7 @@
from django.conf import settings
-from peeringdb_server.models import (InternetExchange, IXLan, Network)
+from peeringdb_server.models import InternetExchange, IXLan, Network
import django_namespace_perms.util as nsp
@@ -46,8 +46,10 @@ def __init__(self, viewset, qset, filters):
self.fields = request.query_params.get("fields")
if self.fields:
self.fields = self.fields.split(",")
- self.path = os.path.join(settings.API_CACHE_ROOT, "%s-%s.json" %
- (viewset.model.handleref.tag, self.depth))
+ self.path = os.path.join(
+ settings.API_CACHE_ROOT,
+ "%s-%s.json" % (viewset.model.handleref.tag, self.depth),
+ )
def qualifies(self):
"""
@@ -59,8 +61,12 @@ def qualifies(self):
return False
# no depth and a limit lower than 251 seems like a tipping point
# were non-cache retrieval is faster still
- if not self.depth and self.limit and self.limit <= 250 and getattr(
- settings, "API_CACHE_ALL_LIMITS", False) is False:
+ if (
+ not self.depth
+ and self.limit
+ and self.limit <= 250
+ and getattr(settings, "API_CACHE_ALL_LIMITS", False) is False
+ ):
return False
# filters have been specified, no
if self.filters or self.since:
@@ -89,25 +95,19 @@ def load(self):
data = data.get("data")
# apply permissions to data
- fnc = getattr(self, "apply_permissions_%s" % self.model.handleref.tag,
- None)
+ fnc = getattr(self, "apply_permissions_%s" % self.model.handleref.tag, None)
if fnc:
data = fnc(data)
# apply pagination
if self.skip and self.limit:
- data = data[self.skip:self.skip + self.limit]
+ data = data[self.skip : self.skip + self.limit]
elif self.skip:
- data = data[self.skip:]
+ data = data[self.skip :]
elif self.limit:
- data = data[:self.limit]
+ data = data[: self.limit]
- return {
- "results": data,
- "__meta": {
- "generated": os.path.getmtime(self.path)
- }
- }
+ return {"results": data, "__meta": {"generated": os.path.getmtime(self.path)}}
def apply_permissions(self, ns, data, ruleset={}):
"""
@@ -129,11 +129,12 @@ def apply_permissions(self, ns, data, ruleset={}):
return nsp.dict_get_path(
nsp.permissions_apply(
- nsp.dict_from_namespace(ns, data), self.request.user,
- ruleset=ruleset), ns)
+ nsp.dict_from_namespace(ns, data), self.request.user, ruleset=ruleset
+ ),
+ ns,
+ )
- def apply_permissions_generic(self, data, explicit=False, join_ids=[],
- **kwargs):
+ def apply_permissions_generic(self, data, explicit=False, join_ids=[], **kwargs):
"""
Apply permissions to all rows according to rules
specified in parameters
@@ -160,9 +161,9 @@ def apply_permissions_generic(self, data, explicit=False, join_ids=[],
for t, p, model in join_ids:
joined_ids[t] = {
"p": p,
- "ids": self.join_ids(data, t, p, model,
- joined_ids.get(p, e).get("ids",
- e).values())
+ "ids": self.join_ids(
+ data, t, p, model, joined_ids.get(p, e).get("ids", e).values()
+ ),
}
for row in data:
@@ -230,10 +231,12 @@ def join_ids(self, data, target_id, proxy_id, model, stash=[]):
else:
ids = [r[proxy_id] for r in data]
- return dict([
- (r["id"], r[target_id])
- for r in model.objects.filter(id__in=ids).values("id", target_id)
- ])
+ return dict(
+ [
+ (r["id"], r[target_id])
+ for r in model.objects.filter(id__in=ids).values("id", target_id)
+ ]
+ )
# permissioning functions for each handlref type
@@ -241,45 +244,62 @@ def apply_permissions_org(self, data):
return self.apply_permissions_generic(data, id="id")
def apply_permissions_fac(self, data):
- return self.apply_permissions_generic(data, fac_id="id",
- org_id="org_id")
+ return self.apply_permissions_generic(data, fac_id="id", org_id="org_id")
def apply_permissions_ix(self, data):
- return self.apply_permissions_generic(data, ix_id="id",
- org_id="org_id")
+ return self.apply_permissions_generic(data, ix_id="id", org_id="org_id")
def apply_permissions_net(self, data):
- return self.apply_permissions_generic(data, net_id="id",
- org_id="org_id")
+ return self.apply_permissions_generic(data, net_id="id", org_id="org_id")
def apply_permissions_ixpfx(self, data):
return self.apply_permissions_generic(
- data, join_ids=[("ix_id", "ixlan_id", IXLan),
- ("org_id", "ix_id",
- InternetExchange)], ixlan_id="ixlan_id", id="id")
+ data,
+ join_ids=[
+ ("ix_id", "ixlan_id", IXLan),
+ ("org_id", "ix_id", InternetExchange),
+ ],
+ ixlan_id="ixlan_id",
+ id="id",
+ )
def apply_permissions_ixlan(self, data):
return self.apply_permissions_generic(
- data, join_ids=[("org_id", "ix_id",
- InternetExchange)], ix_id="ix_id", id="id")
+ data,
+ join_ids=[("org_id", "ix_id", InternetExchange)],
+ ix_id="ix_id",
+ id="id",
+ )
def apply_permissions_ixfac(self, data):
return self.apply_permissions_generic(
- data, join_ids=[("org_id", "ix_id",
- InternetExchange)], ix_id="ix_id", id="id")
+ data,
+ join_ids=[("org_id", "ix_id", InternetExchange)],
+ ix_id="ix_id",
+ id="id",
+ )
def apply_permissions_netfac(self, data):
return self.apply_permissions_generic(
- data, join_ids=[("org_id", "net_id",
- Network)], net_id="net_id", fac_id="fac_id")
+ data,
+ join_ids=[("org_id", "net_id", Network)],
+ net_id="net_id",
+ fac_id="fac_id",
+ )
def apply_permissions_netixlan(self, data):
return self.apply_permissions_generic(
- data, join_ids=[("org_id", "net_id",
- Network)], net_id="net_id", ixlan_id="ixlan_id")
+ data,
+ join_ids=[("org_id", "net_id", Network)],
+ net_id="net_id",
+ ixlan_id="ixlan_id",
+ )
def apply_permissions_poc(self, data):
return self.apply_permissions_generic(
- data, explicit=lambda x: (x.get("visible") != "Public"),
- join_ids=[("org_id", "net_id",
- Network)], vis="visible", net_id="net_id")
+ data,
+ explicit=lambda x: (x.get("visible") != "Public"),
+ join_ids=[("org_id", "net_id", Network)],
+ vis="visible",
+ net_id="net_id",
+ )
diff --git a/peeringdb_server/autocomplete_views.py b/peeringdb_server/autocomplete_views.py
index ae3029d8..07fdbaea 100644
--- a/peeringdb_server/autocomplete_views.py
+++ b/peeringdb_server/autocomplete_views.py
@@ -6,9 +6,16 @@
from django.core.exceptions import ObjectDoesNotExist
from reversion.models import Version
from dal import autocomplete
-from peeringdb_server.models import (InternetExchange, Facility,
- NetworkFacility, InternetExchangeFacility,
- Organization, IXLan, CommandLineTool, REFTAG_MAP)
+from peeringdb_server.models import (
+ InternetExchange,
+ Facility,
+ NetworkFacility,
+ InternetExchangeFacility,
+ Organization,
+ IXLan,
+ CommandLineTool,
+ REFTAG_MAP,
+)
from peeringdb_server.admin_commandline_tools import TOOL_MAP
@@ -18,10 +25,11 @@ def has_add_permissions(self, request):
return False
def render_to_response(self, context):
- q = self.request.GET.get('q', None)
- return http.HttpResponse("".join(
- [i.get("text") for i in self.get_results(context)]),
- content_type="text/html")
+ q = self.request.GET.get("q", None)
+ return http.HttpResponse(
+ "".join([i.get("text") for i in self.get_results(context)]),
+ content_type="text/html",
+ )
class ExchangeAutocompleteJSON(autocomplete.Select2QuerySetView):
@@ -29,7 +37,7 @@ def get_queryset(self):
qs = InternetExchange.objects.filter(status="ok")
if self.q:
qs = qs.filter(name__icontains=self.q)
- qs = qs.order_by('name')
+ qs = qs.order_by("name")
return qs
@@ -38,12 +46,14 @@ def get_queryset(self):
qs = InternetExchange.objects.filter(status="ok")
if self.q:
qs = qs.filter(name__icontains=self.q)
- qs = qs.order_by('name')
+ qs = qs.order_by("name")
return qs
def get_result_label(self, item):
return u'%s
' % (
- item.pk, html.escape(item.name))
+ item.pk,
+ html.escape(item.name),
+ )
class FacilityAutocompleteJSON(autocomplete.Select2QuerySetView):
@@ -51,7 +61,7 @@ def get_queryset(self):
qs = Facility.objects.filter(status="ok")
if self.q:
qs = qs.filter(name__icontains=self.q)
- qs = qs.order_by('name')
+ qs = qs.order_by("name")
return qs
@@ -59,14 +69,15 @@ class FacilityAutocomplete(AutocompleteHTMLResponse):
def get_queryset(self):
qs = Facility.objects.filter(status="ok")
if self.q:
- qs = qs.filter(
- Q(name__icontains=self.q) | Q(address1__icontains=self.q))
- qs = qs.order_by('name')
+ qs = qs.filter(Q(name__icontains=self.q) | Q(address1__icontains=self.q))
+ qs = qs.order_by("name")
return qs
def get_result_label(self, item):
- return u'%s
%s
' % (
- item.pk, html.escape(item.name), html.escape(item.address1))
+ return (
+ u'%s
%s
'
+ % (item.pk, html.escape(item.name), html.escape(item.address1))
+ )
class FacilityAutocompleteForNetwork(FacilityAutocomplete):
@@ -75,8 +86,7 @@ def get_queryset(self):
net_id = self.request.resolver_match.kwargs.get("net_id")
fac_ids = [
nf.facility_id
- for nf in NetworkFacility.objects.filter(status="ok",
- network_id=net_id)
+ for nf in NetworkFacility.objects.filter(status="ok", network_id=net_id)
]
qs = qs.exclude(id__in=fac_ids)
return qs
@@ -88,8 +98,7 @@ def get_queryset(self):
ix_id = self.request.resolver_match.kwargs.get("ix_id")
fac_ids = [
nf.facility_id
- for nf in InternetExchangeFacility.objects.filter(
- status="ok", ix_id=ix_id)
+ for nf in InternetExchangeFacility.objects.filter(status="ok", ix_id=ix_id)
]
qs = qs.exclude(id__in=fac_ids)
return qs
@@ -100,12 +109,14 @@ def get_queryset(self):
qs = Organization.objects.filter(status="ok")
if self.q:
qs = qs.filter(name__icontains=self.q)
- qs = qs.order_by('name')
+ qs = qs.order_by("name")
return qs
def get_result_label(self, item):
return u'%s
' % (
- item.pk, html.escape(item.name))
+ item.pk,
+ html.escape(item.name),
+ )
class IXLanAutocomplete(AutocompleteHTMLResponse):
@@ -113,16 +124,22 @@ def get_queryset(self):
qs = IXLan.objects.filter(status="ok").select_related("ix")
if self.q:
qs = qs.filter(
- Q(ix__name__icontains=self.q)
- | Q(ix__name_long__icontains=self.q))
- qs = qs.order_by('ix__name')
+ Q(ix__name__icontains=self.q) | Q(ix__name_long__icontains=self.q)
+ )
+ qs = qs.order_by("ix__name")
return qs
def get_result_label(self, item):
- return u' %s
%s
' % (
- item.pk, html.escape(item.ix.name),
- html.escape(item.ix.country.code), html.escape(item.ix.name_long),
- html.escape(item.name))
+ return (
+ u' %s
%s
'
+ % (
+ item.pk,
+ html.escape(item.ix.name),
+ html.escape(item.ix.country.code),
+ html.escape(item.ix.name_long),
+ html.escape(item.name),
+ )
+ )
class DeletedVersionAutocomplete(autocomplete.Select2QuerySetView):
@@ -136,7 +153,6 @@ def get_queryset(self):
if not self.request.user.is_staff:
return []
-
# no query supplied, return empty result
if not self.q:
return []
@@ -155,7 +171,11 @@ def get_queryset(self):
except (KeyError, ObjectDoesNotExist):
return []
- versions = Version.objects.get_for_object(obj).order_by("revision_id").select_related("revision")
+ versions = (
+ Version.objects.get_for_object(obj)
+ .order_by("revision_id")
+ .select_related("revision")
+ )
rv = []
previous = {}
@@ -179,25 +199,24 @@ def get_result_label(self, item):
return "{} - {}".format(item, str(item.revision.date_created).split(".")[0])
-
class CommandLineToolHistoryAutocomplete(autocomplete.Select2QuerySetView):
"""
Autocomplete for command line tools that were ran via the admin ui
"""
+
tool = ""
def get_queryset(self):
# Only staff needs to be able to see these
if not self.request.user.is_staff:
return []
- qs = CommandLineTool.objects.filter(
- tool=self.tool).order_by("-created")
+ qs = CommandLineTool.objects.filter(tool=self.tool).order_by("-created")
if self.q:
qs = qs.filter(description__icontains=self.q)
return qs
def get_result_label(self, item):
- return (item.description or self.tool)
+ return item.description or self.tool
clt_history = {}
diff --git a/peeringdb_server/data_views.py b/peeringdb_server/data_views.py
index 90b8f98c..b316590b 100644
--- a/peeringdb_server/data_views.py
+++ b/peeringdb_server/data_views.py
@@ -14,10 +14,9 @@
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
-from peeringdb_server.models import (
- Organization, Network, Sponsorship)
+from peeringdb_server.models import Organization, Network, Sponsorship
-#def _(x):
+# def _(x):
# return x
# until django-peeringdb is updated we want to remove
@@ -34,14 +33,14 @@
# values in a comma separated fashion - user for
# advanced search
const.RATIOS_ADVS = list(const.RATIOS[1:])
-const.RATIOS_ADVS[0] = (",%s" % const.RATIOS_ADVS[0][0],
- const.RATIOS_ADVS[0][1])
+const.RATIOS_ADVS[0] = (",%s" % const.RATIOS_ADVS[0][0], const.RATIOS_ADVS[0][1])
const.SCOPES_ADVS = list(const.SCOPES[1:])
-const.SCOPES_ADVS[0] = (",%s" % const.SCOPES_ADVS[0][0],
- const.SCOPES_ADVS[0][1])
+const.SCOPES_ADVS[0] = (",%s" % const.SCOPES_ADVS[0][0], const.SCOPES_ADVS[0][1])
const.NET_TYPES_ADVS = list(const.NET_TYPES[1:])
-const.NET_TYPES_ADVS[0] = (",%s" % const.NET_TYPES_ADVS[0][0],
- const.NET_TYPES_ADVS[0][1])
+const.NET_TYPES_ADVS[0] = (
+ ",%s" % const.NET_TYPES_ADVS[0][0],
+ const.NET_TYPES_ADVS[0][1],
+)
const.ORG_GROUPS = (("member", "member"), ("admin", "admin"))
@@ -56,15 +55,15 @@ def countries_w_blank(request):
Returns all valid countries and their country codes with a blank field
"""
- return JsonResponse({
- "countries_b": [{
- "id": "",
- "name": ""
- }] + [{
- "id": unicode(code),
- "name": unicode(name)
- } for code, name in list(django_countries.countries)]
- })
+ return JsonResponse(
+ {
+ "countries_b": [{"id": "", "name": ""}]
+ + [
+ {"id": unicode(code), "name": unicode(name)}
+ for code, name in list(django_countries.countries)
+ ]
+ }
+ )
def countries(request):
@@ -72,12 +71,15 @@ def countries(request):
Returns all valid countries and their country codes
"""
- return JsonResponse({
- "countries": [{
- "id": unicode(code),
- "name": unicode(name)
- } for code, name in list(django_countries.countries)]
- })
+ return JsonResponse(
+ {
+ "countries": [
+ {"id": unicode(code), "name": unicode(name)}
+ for code, name in list(django_countries.countries)
+ ]
+ }
+ )
+
def sponsorships(request):
"""
@@ -86,11 +88,10 @@ def sponsorships(request):
sponsors = {}
for org, sponsorship in Sponsorship.active_by_org():
- sponsors[org.id] = {"id":org.id, "name":sponsorship.label.lower()}
+ sponsors[org.id] = {"id": org.id, "name": sponsorship.label.lower()}
+
+ return JsonResponse({"sponsors": sponsors,})
- return JsonResponse({
- "sponsors": sponsors,
- })
@login_required
def facilities(request):
@@ -98,13 +99,14 @@ def facilities(request):
Returns all valid facilities with id and name
"""
- return JsonResponse({
- "facilities": [{
- "id": fac.id,
- "name": unicode(fac.name)
- } for fac in models.Facility.handleref.all().undeleted()
- .order_by("name")]
- })
+ return JsonResponse(
+ {
+ "facilities": [
+ {"id": fac.id, "name": unicode(fac.name)}
+ for fac in models.Facility.handleref.all().undeleted().order_by("name")
+ ]
+ }
+ )
def decode(value):
@@ -124,22 +126,43 @@ def decode(value):
def enum(request, name):
if name.upper() not in [
- "RATIOS", "RATIOS_TRUNC", "RATIOS_ADVS", "TRAFFIC", "SCOPES",
- "SCOPES_TRUNC", "SCOPES_ADVS", "NET_TYPES", "NET_TYPES_TRUNC",
- "NET_TYPES_ADVS", "POLICY_GENERAL", "POLICY_LOCATIONS",
- "POLICY_CONTRACTS", "REGIONS", "POC_ROLES", "MEDIA", "PROTOCOLS",
- "ORG_GROUPS", "BOOL_CHOICE_STR", "VISIBILITY"
+ "RATIOS",
+ "RATIOS_TRUNC",
+ "RATIOS_ADVS",
+ "TRAFFIC",
+ "SCOPES",
+ "SCOPES_TRUNC",
+ "SCOPES_ADVS",
+ "NET_TYPES",
+ "NET_TYPES_TRUNC",
+ "NET_TYPES_ADVS",
+ "POLICY_GENERAL",
+ "POLICY_LOCATIONS",
+ "POLICY_CONTRACTS",
+ "REGIONS",
+ "POC_ROLES",
+ "MEDIA",
+ "PROTOCOLS",
+ "ORG_GROUPS",
+ "BOOL_CHOICE_STR",
+ "VISIBILITY",
]:
raise Exception("Unknown enum")
- return JsonResponse({
- "enum/%s" % name: [{
- "id": id,
- # as of django-peeringdb 1.0.0 already comes in
- # translated
- "name": decode(n)
- } for id, n in getattr(const, name.upper())]
- })
+ return JsonResponse(
+ {
+ "enum/%s"
+ % name: [
+ {
+ "id": id,
+ # as of django-peeringdb 1.0.0 already comes in
+ # translated
+ "name": decode(n),
+ }
+ for id, n in getattr(const, name.upper())
+ ]
+ }
+ )
def asns(request):
@@ -170,20 +193,20 @@ def organizations(request):
if not request.user.is_superuser:
return JsonResponse({}, status=403)
- return JsonResponse({
- "organizations": [{
- "id": o.id,
- "name": o.name
- } for o in Organization.objects.filter(status="ok").order_by("name")]
- })
+ return JsonResponse(
+ {
+ "organizations": [
+ {"id": o.id, "name": o.name}
+ for o in Organization.objects.filter(status="ok").order_by("name")
+ ]
+ }
+ )
def languages(request):
from django.conf import settings
+
cur_language = translation.get_language()
- return JsonResponse({
- "locales": [{
- "id": id,
- "name": _(name)
- } for (id, name) in settings.LANGUAGES]
- })
+ return JsonResponse(
+ {"locales": [{"id": id, "name": _(name)} for (id, name) in settings.LANGUAGES]}
+ )
diff --git a/peeringdb_server/deskpro.py b/peeringdb_server/deskpro.py
index b40446ed..4a80f282 100644
--- a/peeringdb_server/deskpro.py
+++ b/peeringdb_server/deskpro.py
@@ -15,8 +15,11 @@
def ticket_queue(subject, body, user):
""" queue a deskpro ticket for creation """
- ticket = DeskProTicket.objects.create(subject=u"{}{}".format(
- settings.EMAIL_SUBJECT_PREFIX, subject), body=body, user=user)
+ ticket = DeskProTicket.objects.create(
+ subject=u"{}{}".format(settings.EMAIL_SUBJECT_PREFIX, subject),
+ body=body,
+ user=user,
+ )
class APIError(IOError):
@@ -40,15 +43,13 @@ def ticket_queue_asnauto_skipvq(user, org, net, rir_data):
else:
org_name = org.name
- ticket_queue("[ASNAUTO] Network '%s' approved for existing Org '%s'" %
- (net_name, org_name),
- loader.get_template(
- 'email/notify-pdb-admin-asnauto-skipvq.txt').render({
- "user": user,
- "org": org,
- "net": net,
- "rir_data": rir_data
- }), user)
+ ticket_queue(
+ "[ASNAUTO] Network '%s' approved for existing Org '%s'" % (net_name, org_name),
+ loader.get_template("email/notify-pdb-admin-asnauto-skipvq.txt").render(
+ {"user": user, "org": org, "net": net, "rir_data": rir_data}
+ ),
+ user,
+ )
def ticket_queue_asnauto_affil(user, org, net, rir_data):
@@ -57,19 +58,18 @@ def ticket_queue_asnauto_affil(user, org, net, rir_data):
"""
ticket_queue(
- "[ASNAUTO] Ownership claim granted to Org '%s' for user '%s'" %
- (org.name, user.username),
- loader.get_template('email/notify-pdb-admin-asnauto-affil.txt').render(
- {
- "user": user,
- "org": org,
- "net": net,
- "rir_data": rir_data
- }), user)
-
-
-def ticket_queue_asnauto_create(user, org, net, rir_data, asn,
- org_created=False, net_created=False):
+ "[ASNAUTO] Ownership claim granted to Org '%s' for user '%s'"
+ % (org.name, user.username),
+ loader.get_template("email/notify-pdb-admin-asnauto-affil.txt").render(
+ {"user": user, "org": org, "net": net, "rir_data": rir_data}
+ ),
+ user,
+ )
+
+
+def ticket_queue_asnauto_create(
+ user, org, net, rir_data, asn, org_created=False, net_created=False
+):
"""
queue deskro ticket creation for asn automation action: create
"""
@@ -88,15 +88,20 @@ def ticket_queue_asnauto_create(user, org, net, rir_data, asn,
ticket_queue(
"[ASNAUTO] %s created" % subject,
loader.get_template(
- 'email/notify-pdb-admin-asnauto-entity-creation.txt').render({
+ "email/notify-pdb-admin-asnauto-entity-creation.txt"
+ ).render(
+ {
"user": user,
"org": org,
"net": net,
"asn": asn,
"org_created": org_created,
"net_created": net_created,
- "rir_data": rir_data
- }), user)
+ "rir_data": rir_data,
+ }
+ ),
+ user,
+ )
def ticket_queue_rdap_error(user, asn, error):
@@ -110,11 +115,11 @@ def ticket_queue_rdap_error(user, asn, error):
subject = "[RDAP_ERR] {} - AS{}".format(user.username, asn)
ticket_queue(
subject,
- loader.get_template('email/notify-pdb-admin-rdap-error.txt').render({
- "user": user,
- "asn": asn,
- "error_details": error_message
- }), user)
+ loader.get_template("email/notify-pdb-admin-rdap-error.txt").render(
+ {"user": user, "asn": asn, "error_details": error_message}
+ ),
+ user,
+ )
class APIClient(object):
@@ -143,42 +148,48 @@ def parse_response(self, response, many=False):
return data
def get(self, endpoint, param):
- response = requests.get("{}/{}".format(self.url, endpoint),
- params=param, headers=self.auth_headers)
+ response = requests.get(
+ "{}/{}".format(self.url, endpoint), params=param, headers=self.auth_headers
+ )
return self.parse_response(response)
def create(self, endpoint, param):
- response = requests.post("{}/{}".format(self.url, endpoint),
- json=param, headers=self.auth_headers)
+ response = requests.post(
+ "{}/{}".format(self.url, endpoint), json=param, headers=self.auth_headers
+ )
return self.parse_response(response)
def require_person(self, user):
person = self.get("people", {"primary_email": user.email})
if not person:
person = self.create(
- "people", {
+ "people",
+ {
"primary_email": user.email,
"first_name": user.first_name,
"last_name": user.last_name,
- "name": user.full_name
- })
+ "name": user.full_name,
+ },
+ )
return person
def create_ticket(self, ticket):
person = self.require_person(ticket.user)
ticket_response = self.create(
- "tickets", {
+ "tickets",
+ {
"subject": ticket.subject,
- "person": {
- "id": person["id"]
- },
- "status": "awaiting_agent"
- })
+ "person": {"id": person["id"]},
+ "status": "awaiting_agent",
+ },
+ )
self.create(
- "tickets/{}/messages".format(ticket_response["id"]), {
+ "tickets/{}/messages".format(ticket_response["id"]),
+ {
"message": ticket.body.replace("\n", "
\n"),
"person": person["id"],
- "format": "html"
- })
+ "format": "html",
+ },
+ )
diff --git a/peeringdb_server/export_views.py b/peeringdb_server/export_views.py
index 5d982a7d..d226556e 100644
--- a/peeringdb_server/export_views.py
+++ b/peeringdb_server/export_views.py
@@ -10,9 +10,8 @@
from django.utils.translation import ugettext_lazy as _
from rest_framework.test import APIRequestFactory
-from peeringdb_server.models import (IXLan, NetworkIXLan, InternetExchange)
-from peeringdb_server.rest import (
- REFTAG_MAP as RestViewSets, )
+from peeringdb_server.models import IXLan, NetworkIXLan, InternetExchange
+from peeringdb_server.rest import REFTAG_MAP as RestViewSets
from peeringdb_server.renderers import JSONEncoder
@@ -28,10 +27,7 @@ def export_ixf_ix_members(ixlans, pretty=False):
"version": "0.6",
"timestamp": datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ"),
"member_list": member_list,
- "ixp_list": [{
- "ixp_id": ixp.id,
- "shortname": ixp.name
- } for ixp in ixp_list]
+ "ixp_list": [{"ixp_id": ixp.id, "shortname": ixp.name} for ixp in ixp_list],
}
for ixlan in ixlans:
@@ -47,30 +43,25 @@ def export_ixf_ix_members(ixlans, pretty=False):
"url": netixlan.network.website,
"contact_email": [
poc.email
- for poc in netixlan.network.poc_set_active.filter(
- visible="Public")
+ for poc in netixlan.network.poc_set_active.filter(visible="Public")
],
"contact_phone": [
poc.phone
- for poc in netixlan.network.poc_set_active.filter(
- visible="Public")
+ for poc in netixlan.network.poc_set_active.filter(visible="Public")
],
"peering_policy": netixlan.network.policy_general.lower(),
"peering_policy_url": netixlan.network.policy_url,
- "connection_list": connection_list
+ "connection_list": connection_list,
}
member_list.append(member)
asns.append(netixlan.asn)
- for _netixlan in ixlan.netixlan_set_active.filter(
- asn=netixlan.asn):
+ for _netixlan in ixlan.netixlan_set_active.filter(asn=netixlan.asn):
vlan_list = [{}]
connection = {
"ixp_id": _netixlan.ixlan.ix_id,
"state": "active",
- "if_list": [{
- "if_speed": _netixlan.speed
- }],
- "vlan_list": vlan_list
+ "if_list": [{"if_speed": _netixlan.speed}],
+ "vlan_list": vlan_list,
}
connection_list.append(connection)
@@ -79,14 +70,14 @@ def export_ixf_ix_members(ixlans, pretty=False):
"address": "{}".format(_netixlan.ipaddr4),
"routeserver": _netixlan.is_rs_peer,
"max_prefix": _netixlan.network.info_prefixes4,
- "as_macro": _netixlan.network.irr_as_set
+ "as_macro": _netixlan.network.irr_as_set,
}
if _netixlan.ipaddr6:
vlan_list[0]["ipv6"] = {
"address": "{}".format(_netixlan.ipaddr6),
"routeserver": _netixlan.is_rs_peer,
"max_prefix": _netixlan.network.info_prefixes6,
- "as_macro": _netixlan.network.irr_as_set
+ "as_macro": _netixlan.network.irr_as_set,
}
if pretty:
@@ -99,16 +90,20 @@ def view_export_ixf_ix_members(request, ix_id):
return HttpResponse(
export_ixf_ix_members(
IXLan.objects.filter(ix_id=ix_id, status="ok"),
- pretty=request.GET.has_key("pretty")),
- content_type="application/json")
+ pretty=request.GET.has_key("pretty"),
+ ),
+ content_type="application/json",
+ )
def view_export_ixf_ixlan_members(request, ixlan_id):
return HttpResponse(
export_ixf_ix_members(
IXLan.objects.filter(id=ixlan_id, status="ok"),
- pretty=request.GET.has_key("pretty")),
- content_type="application/json")
+ pretty=request.GET.has_key("pretty"),
+ ),
+ content_type="application/json",
+ )
class ExportView(View):
@@ -144,11 +139,10 @@ def get(self, request, fmt):
if self.download == True:
# send attachment header, triggering download on the client side
- filename = self.download_name.format(
- extension=self.extensions.get(fmt))
- response[
- 'Content-Disposition'] = 'attachment; filename="{}"'.format(
- filename)
+ filename = self.download_name.format(extension=self.extensions.get(fmt))
+ response["Content-Disposition"] = 'attachment; filename="{}"'.format(
+ filename
+ )
return response
except Exception as exc:
@@ -193,8 +187,8 @@ def response_json_pretty(self, data):
if self.json_root_key:
data = {self.json_root_key: data}
return HttpResponse(
- json.dumps(data, indent=2, cls=JSONEncoder),
- content_type="application/json")
+ json.dumps(data, indent=2, cls=JSONEncoder), content_type="application/json"
+ )
def response_csv(self, data):
"""
@@ -250,8 +244,9 @@ def fetch(self, request):
request_factory = APIRequestFactory()
viewset = RestViewSets[self.tag].as_view({"get": "list"})
- api_request = request_factory.get("/api/{}/?{}".format(
- self.tag, urllib.urlencode(params)))
+ api_request = request_factory.get(
+ "/api/{}/?{}".format(self.tag, urllib.urlencode(params))
+ )
# we want to use the same user as the original request
# so permissions are applied correctly
@@ -301,18 +296,21 @@ def generate_net(self, request):
download_data = []
for row in data:
download_data.append(
- collections.OrderedDict([
- ("Name", row["name"]),
- ("Also known as", row["aka"]),
- ("ASN", row["asn"]),
- ("General Policy", row["policy_general"]),
- ("Network Type", row["info_type"]),
- ("Network Scope", row["info_scope"]),
- ("Traffic Levels", row["info_traffic"]),
- ("Traffic Ratio", row["info_ratio"]),
- ("Exchanges", len(row["netixlan_set"])),
- ("Facilities", len(row["netfac_set"])),
- ]))
+ collections.OrderedDict(
+ [
+ ("Name", row["name"]),
+ ("Also known as", row["aka"]),
+ ("ASN", row["asn"]),
+ ("General Policy", row["policy_general"]),
+ ("Network Type", row["info_type"]),
+ ("Network Scope", row["info_scope"]),
+ ("Traffic Levels", row["info_traffic"]),
+ ("Traffic Ratio", row["info_ratio"]),
+ ("Exchanges", len(row["netixlan_set"])),
+ ("Facilities", len(row["netfac_set"])),
+ ]
+ )
+ )
return download_data
def generate_fac(self, request):
@@ -332,13 +330,19 @@ def generate_fac(self, request):
for row in data:
download_data.append(
collections.OrderedDict(
- [("Name", row["name"]), ("Management", row["org_name"]),
- ("CLLI", row["clli"]), ("NPA-NXX", row["npanxx"]),
- ("City", row["city"]), ("Country", row["country"]),
- ("State",
- row["state"]), ("Postal Code",
- row["zipcode"]), ("Networks",
- row["net_count"])]))
+ [
+ ("Name", row["name"]),
+ ("Management", row["org_name"]),
+ ("CLLI", row["clli"]),
+ ("NPA-NXX", row["npanxx"]),
+ ("City", row["city"]),
+ ("Country", row["country"]),
+ ("State", row["state"]),
+ ("Postal Code", row["zipcode"]),
+ ("Networks", row["net_count"]),
+ ]
+ )
+ )
return download_data
def generate_ix(self, request):
@@ -357,11 +361,14 @@ def generate_ix(self, request):
download_data = []
for row in data:
download_data.append(
- collections.OrderedDict([
- ("Name", row["name"]),
- ("Media Type", row["media"]),
- ("Country", row["country"]),
- ("City", row["city"]),
- ("Networks", row["net_count"])
- ]))
+ collections.OrderedDict(
+ [
+ ("Name", row["name"]),
+ ("Media Type", row["media"]),
+ ("Country", row["country"]),
+ ("City", row["city"]),
+ ("Networks", row["net_count"]),
+ ]
+ )
+ )
return download_data
diff --git a/peeringdb_server/forms.py b/peeringdb_server/forms.py
index 75d26c40..734a0d13 100644
--- a/peeringdb_server/forms.py
+++ b/peeringdb_server/forms.py
@@ -76,8 +76,7 @@ class PasswordChangeForm(forms.Form):
def clean_password(self):
password = self.cleaned_data.get("password")
if len(password) < 10:
- raise forms.ValidationError(
- _("Needs to be at least 10 characters long"))
+ raise forms.ValidationError(_("Needs to be at least 10 characters long"))
return password
def clean_password_v(self):
@@ -86,7 +85,8 @@ def clean_password_v(self):
if password != password_v:
raise forms.ValidationError(
- _("Passwords need to match"), code="password_mismatch")
+ _("Passwords need to match"), code="password_mismatch"
+ )
return password_v
@@ -112,31 +112,33 @@ class Meta:
"last_name",
)
-
def clean(self):
super(UserCreationForm, self).clean()
recaptcha = self.cleaned_data.get("recaptcha", "")
captcha = self.cleaned_data.get("captcha", "")
if not recaptcha and not captcha:
- raise forms.ValidationError(_("Please fill out the anti-spam challenge (captcha) field"))
+ raise forms.ValidationError(
+ _("Please fill out the anti-spam challenge (captcha) field")
+ )
elif recaptcha:
cpt_params = {
"secret": dj_settings.RECAPTCHA_SECRET_KEY,
"response": recaptcha,
- "remoteip": get_client_ip(self.request)
+ "remoteip": get_client_ip(self.request),
}
- cpt_response = requests.post(dj_settings.RECAPTCHA_VERIFY_URL,
- params=cpt_params).json()
+ cpt_response = requests.post(
+ dj_settings.RECAPTCHA_VERIFY_URL, params=cpt_params
+ ).json()
if not cpt_response.get("success"):
raise forms.ValidationError(_("reCAPTCHA invalid"))
else:
try:
hashkey, value = captcha.split(":")
- self.captcha_object = CaptchaStore.objects.get(response=value,
- hashkey=hashkey,
- expiration__gt=timezone.now())
+ self.captcha_object = CaptchaStore.objects.get(
+ response=value, hashkey=hashkey, expiration__gt=timezone.now()
+ )
except CaptchaStore.DoesNotExist:
raise forms.ValidationError(_("captcha invalid"))
@@ -158,4 +160,4 @@ def clean_locale(self):
class Meta:
model = User
- fields = ('locale')
+ fields = "locale"
diff --git a/peeringdb_server/import_views.py b/peeringdb_server/import_views.py
index 3e38b135..8ccb87f4 100644
--- a/peeringdb_server/import_views.py
+++ b/peeringdb_server/import_views.py
@@ -11,45 +11,56 @@
from peeringdb_server import ixf
from peeringdb_server.models import (
- IXLan, Network, NetworkIXLan,
- )
+ IXLan,
+ Network,
+ NetworkIXLan,
+)
RATELIMITS = settings.RATELIMITS
+
def enable_basic_auth(fn):
"""
a simple decorator to enable basic auth for a specific view
"""
+
def wrapped(request, *args, **kwargs):
- if 'HTTP_AUTHORIZATION' in request.META:
- auth = request.META['HTTP_AUTHORIZATION'].split()
+ if "HTTP_AUTHORIZATION" in request.META:
+ auth = request.META["HTTP_AUTHORIZATION"].split()
if len(auth) == 2:
if auth[0].lower() == "basic":
- username, password = base64.b64decode(auth[1]).split(':', 1)
+ username, password = base64.b64decode(auth[1]).split(":", 1)
request.user = authenticate(username=username, password=password)
if not request.user:
- return JsonResponse({"non_field_errors":["Invalid credentials"]}, status=401)
+ return JsonResponse(
+ {"non_field_errors": ["Invalid credentials"]}, status=401
+ )
return fn(request, *args, **kwargs)
+
return wrapped
+
def pretty_response(data):
- return HttpResponse(
- json.dumps(data, indent=2), content_type="application/json")
+ return HttpResponse(json.dumps(data, indent=2), content_type="application/json")
-def error_response(msg, status=400):
- return JsonResponse({"non_field_errors":[msg]},status=status)
+def error_response(msg, status=400):
+ return JsonResponse({"non_field_errors": [msg]}, status=status)
-@ratelimit(key="ip", rate=RATELIMITS["view_import_ixlan_ixf_preview"], group="ixf_preview")
+@ratelimit(
+ key="ip", rate=RATELIMITS["view_import_ixlan_ixf_preview"], group="ixf_preview"
+)
@enable_basic_auth
def view_import_ixlan_ixf_preview(request, ixlan_id):
# check if request was blocked by rate limiting
was_limited = getattr(request, "limited", False)
if was_limited:
- return error_response(_("Please wait a bit before requesting " \
- "another ixf import preview."), status=400)
+ return error_response(
+ _("Please wait a bit before requesting " "another ixf import preview."),
+ status=400,
+ )
try:
ixlan = IXLan.objects.get(id=ixlan_id)
@@ -65,7 +76,9 @@ def view_import_ixlan_ixf_preview(request, ixlan_id):
return pretty_response(importer.log)
-@ratelimit(key="ip", rate=RATELIMITS["view_import_net_ixf_postmortem"], group="ixf_postmortem")
+@ratelimit(
+ key="ip", rate=RATELIMITS["view_import_net_ixf_postmortem"], group="ixf_postmortem"
+)
@enable_basic_auth
def view_import_net_ixf_postmortem(request, net_id):
@@ -73,8 +86,10 @@ def view_import_net_ixf_postmortem(request, net_id):
was_limited = getattr(request, "limited", False)
if was_limited:
- return error_response(_("Please wait a bit before requesting " \
- "another IX-F import postmortem."), status=400)
+ return error_response(
+ _("Please wait a bit before requesting " "another IX-F import postmortem."),
+ status=400,
+ )
# load net
@@ -93,31 +108,37 @@ def view_import_net_ixf_postmortem(request, net_id):
except:
limit = 25
-
errors = []
if limit < 1:
limit = 1
elif limit > settings.IXF_POSTMORTEM_LIMIT:
- errors.append(_("Postmortem length cannot exceed {} entries").format(
- settings.IXF_POSTMORTEM_LIMIT))
+ errors.append(
+ _("Postmortem length cannot exceed {} entries").format(
+ settings.IXF_POSTMORTEM_LIMIT
+ )
+ )
post_mortem = ixf.PostMortem()
log = post_mortem.generate(net.asn, limit=limit)
- return pretty_response({"data":log, "non_field_errors":errors})
+ return pretty_response({"data": log, "non_field_errors": errors})
-@ratelimit(key="ip", rate=RATELIMITS["view_import_ixlan_ixf_preview"], group="ixf_preview")
+@ratelimit(
+ key="ip", rate=RATELIMITS["view_import_ixlan_ixf_preview"], group="ixf_preview"
+)
@enable_basic_auth
def view_import_net_ixf_preview(request, net_id):
# check if request was blocked by rate limiting
was_limited = getattr(request, "limited", False)
if was_limited:
- return error_response(_("Please wait a bit before requesting " \
- "another ixf import preview."), status=400)
+ return error_response(
+ _("Please wait a bit before requesting " "another ixf import preview."),
+ status=400,
+ )
try:
net = Network.objects.get(id=net_id, status="ok")
@@ -127,19 +148,17 @@ def view_import_net_ixf_preview(request, net_id):
if not has_perms(request.user, net, "update"):
return error_response(_("Permission denied"), status=403)
-
- total_log = {"data":[], "errors":[]}
+ total_log = {"data": [], "errors": []}
for ixlan in net.ixlan_set_ixf_enabled:
importer = ixf.Importer()
- importer.cache_only=True
- success, netixlans, deleted, log = importer.update(ixlan, asn=net.asn, save=False)
+ importer.cache_only = True
+ success, netixlans, deleted, log = importer.update(
+ ixlan, asn=net.asn, save=False
+ )
total_log["data"].extend(log["data"])
- total_log["errors"].extend(["{}({}): {}".format(ixlan.ix.name, ixlan.id, err)
- for err in log["errors"]])
-
+ total_log["errors"].extend(
+ ["{}({}): {}".format(ixlan.ix.name, ixlan.id, err) for err in log["errors"]]
+ )
return pretty_response(total_log)
-
-
-
diff --git a/peeringdb_server/inet.py b/peeringdb_server/inet.py
index ec1afeda..e6a7ef5a 100644
--- a/peeringdb_server/inet.py
+++ b/peeringdb_server/inet.py
@@ -43,7 +43,7 @@
# RFC 7003 - last asn 32-bit
ASN_LAST_32BIT,
# trans
- ASN_TRANS
+ ASN_TRANS,
]
# the following bogon asn ranges are allowed on envionments
@@ -72,10 +72,10 @@ class BogonAsn(rdap.RdapAsn):
def __init__(self, asn):
name = "AS{}".format(asn)
self._parsed = {
- "name":name,
- "org_name":name,
- "org_address":None,
- "emails":[]
+ "name": name,
+ "org_name": name,
+ "org_address": None,
+ "emails": [],
}
@@ -87,12 +87,11 @@ class RdapLookup(rdap.RdapClient):
def __init__(self):
# create rdap config
config = dict(
- bootstrap_url=settings.RDAP_URL.rstrip('/'),
+ bootstrap_url=settings.RDAP_URL.rstrip("/"),
lacnic_apikey=settings.RDAP_LACNIC_APIKEY,
)
super(RdapLookup, self).__init__(config)
-
def get_asn(self, asn):
"""
We handle asns that fall into the private/documentation ranges
@@ -103,12 +102,12 @@ def get_asn(self, asn):
if settings.TUTORIAL_MODE and asn_is_in_ranges(asn, TUTORIAL_ASN_RANGES):
return BogonAsn(asn)
else:
- raise RdapException(_("ASNs in this range " \
- "are not allowed in this environment"))
+ raise RdapException(
+ _("ASNs in this range " "are not allowed in this environment")
+ )
return super(RdapLookup, self).get_asn(asn)
-
def asn_is_bogon(asn):
"""
Test if an asn is bogon by being either in the documentation
@@ -178,11 +177,11 @@ def network_is_pdb_valid(network):
# 2002::/16 - RFC 3068 - 6to4 prefix
0x2002,
# 3ffe::/16 - RFC 5156 - used for the 6bone but was returned
- 0x3ffe,
+ 0x3FFE,
# fec0::/10 - RFC 4291 - Reserved by IETF
- 0xfec0,
+ 0xFEC0,
# ff00::/8 - RFC 4291 - Multicast
- 0xff00,
+ 0xFF00,
]
if int(network.network_address) >> 112 in v6_invalid:
@@ -267,14 +266,17 @@ def renumber_ipaddress(ipaddr, old_prefix, new_prefix):
# replace any octet that is not a zero in the netmask
- if (ipaddr.version == 4 and int(octet) > 0) or \
- (ipaddr.version == 6 and octet != "0000"):
+ if (ipaddr.version == 4 and int(octet) > 0) or (
+ ipaddr.version == 6 and octet != "0000"
+ ):
ip_octets[i] = new_octets[i]
i += 1
# return renumbered ipaddress
- return ipaddress.ip_address(u"{}".format(delimiter.join([str(o) for o in ip_octets])))
+ return ipaddress.ip_address(
+ u"{}".format(delimiter.join([str(o) for o in ip_octets]))
+ )
def get_client_ip(request):
@@ -284,6 +286,3 @@ def get_client_ip(request):
else:
ip = request.META.get("REMOTE_ADDR")
return ip
-
-
-
diff --git a/peeringdb_server/ixf.py b/peeringdb_server/ixf.py
index 8b8ca96b..9bd7821f 100644
--- a/peeringdb_server/ixf.py
+++ b/peeringdb_server/ixf.py
@@ -20,18 +20,20 @@
class Importer(object):
- allowed_member_types = ["peering",
- "ixp",
- "routeserver",
- "probono",
- ]
- allowed_states = ["",
- None,
- "active",
- "inactive",
- "connected",
- "operational",
- ]
+ allowed_member_types = [
+ "peering",
+ "ixp",
+ "routeserver",
+ "probono",
+ ]
+ allowed_states = [
+ "",
+ None,
+ "active",
+ "inactive",
+ "connected",
+ "operational",
+ ]
def __init__(self):
self.cache_only = False
@@ -101,7 +103,6 @@ def cache_key(self, url):
return "IXF-CACHE-{}".format(url)
-
def fetch_cached(self, url):
"""
Returns locally cached IX-F data
@@ -117,11 +118,12 @@ def fetch_cached(self, url):
data = cache.get(self.cache_key(url))
if data is None:
- return {"pdb_error": _("IX-F data not locally cached for this resource yet.")}
+ return {
+ "pdb_error": _("IX-F data not locally cached for this resource yet.")
+ }
return data
-
def sanitize(self, data):
"""
Takes ixf data dict and runs some sanitization on it
@@ -171,7 +173,6 @@ def update(self, ixlan, save=True, data=None, timeout=5, asn=None):
self.reset(ixlan=ixlan, save=save, asn=asn)
-
# if data is not provided, retrieve it either from cache or
# from the remote resource
if data is None:
@@ -199,8 +200,7 @@ def update(self, ixlan, save=True, data=None, timeout=5, asn=None):
except KeyError as exc:
# any key erros mean that the data is invalid, log the error and
# bail (transactions are atomic and will be rolled back)
- self.log_error("Internal Error 'KeyError': {}".format(exc),
- save=save)
+ self.log_error("Internal Error 'KeyError': {}".format(exc), save=save)
return (False, self.netixlans, [], self.log)
# process any netixlans that need to be deleted
@@ -239,29 +239,42 @@ def process_deletions(self):
ipv6 = "{}-{}".format(netixlan.asn, netixlan.ipaddr6)
if netixlan.asn not in self.asns:
- self.log_peer(netixlan.asn, "delete",
- _("ASN no longer in data"), netixlan)
+ self.log_peer(
+ netixlan.asn, "delete", _("ASN no longer in data"), netixlan
+ )
self.netixlans_deleted.append(netixlan)
if self.save:
netixlan.delete()
elif ipv4 not in self.ipaddresses and ipv6 not in self.ipaddresses:
- self.log_peer(netixlan.asn, "delete",
- _("Ip addresses no longer exist in validated data or are "\
- "no longer with this asn"), netixlan)
+ self.log_peer(
+ netixlan.asn,
+ "delete",
+ _(
+ "Ip addresses no longer exist in validated data or are "
+ "no longer with this asn"
+ ),
+ netixlan,
+ )
self.netixlans_deleted.append(netixlan)
if self.save:
netixlan.delete()
- elif (netixlan.ipaddr4 and ipv4 not in self.ipaddresses) or \
- (netixlan.ipaddr6 and ipv6 not in self.ipaddresses):
+ elif (netixlan.ipaddr4 and ipv4 not in self.ipaddresses) or (
+ netixlan.ipaddr6 and ipv6 not in self.ipaddresses
+ ):
if not netixlan.network.allow_ixp_update:
- self.log_peer(netixlan.asn, "delete",
- _("At least one ipaddress mismatched and "\
- "network has disabled updates"), netixlan)
+ self.log_peer(
+ netixlan.asn,
+ "delete",
+ _(
+ "At least one ipaddress mismatched and "
+ "network has disabled updates"
+ ),
+ netixlan,
+ )
self.netixlans_deleted.append(netixlan)
if self.save:
netixlan.delete()
-
@transaction.atomic()
def archive(self):
"""
@@ -269,22 +282,22 @@ def archive(self):
"""
if self.save and (self.netixlans or self.netixlans_deleted):
- persist_log = IXLanIXFMemberImportLog.objects.create(
- ixlan=self.ixlan)
+ persist_log = IXLanIXFMemberImportLog.objects.create(ixlan=self.ixlan)
for netixlan in self.netixlans + self.netixlans_deleted:
- versions = reversion.models.Version.objects.get_for_object(
- netixlan)
+ versions = reversion.models.Version.objects.get_for_object(netixlan)
if len(versions) == 1:
version_before = None
else:
version_before = versions[1]
version_after = versions[0]
info = self.archive_info.get(netixlan.id, {})
- persist_log.entries.create(netixlan=netixlan,
- version_before=version_before,
- action=info.get("action"),
- reason=info.get("reason"),
- version_after=version_after)
+ persist_log.entries.create(
+ netixlan=netixlan,
+ version_before=version_before,
+ action=info.get("action"),
+ reason=info.get("reason"),
+ version_after=version_after,
+ )
def parse(self, data):
"""
@@ -323,18 +336,23 @@ def parse_members(self, member_list):
network = Network.objects.get(asn=asn)
if network.status != "ok":
self.log_peer(
- asn, "ignore",
- _("Network status is '{}'").format(network.status))
+ asn,
+ "ignore",
+ _("Network status is '{}'").format(network.status),
+ )
continue
self.parse_connections(
- member.get("connection_list", []), network, member)
+ member.get("connection_list", []), network, member
+ )
else:
- self.log_peer(asn, "ignore",
- _("Network does not exist in peeringdb"))
+ self.log_peer(
+ asn, "ignore", _("Network does not exist in peeringdb")
+ )
else:
- self.log_peer(asn, "ignore",
- _("Invalid member type: {}").format(member_type))
+ self.log_peer(
+ asn, "ignore", _("Invalid member type: {}").format(member_type)
+ )
def parse_connections(self, connection_list, network, member):
"""
@@ -354,11 +372,12 @@ def parse_connections(self, connection_list, network, member):
speed = self.parse_speed(connection.get("if_list", []))
self.parse_vlans(
- connection.get("vlan_list", []), network, member,
- connection, speed)
+ connection.get("vlan_list", []), network, member, connection, speed
+ )
else:
- self.log_peer(asn, "ignore",
- _("Invalid connection state: {}").format(state))
+ self.log_peer(
+ asn, "ignore", _("Invalid connection state: {}").format(state)
+ )
def parse_vlans(self, vlan_list, network, member, connection, speed):
"""
@@ -382,9 +401,12 @@ def parse_vlans(self, vlan_list, network, member, connection, speed):
# vlan entry has no ipaddresses set, log and ignore
if not ipv4 and not ipv6:
- self.log_error(_("Could not find ipv4 or 6 address in " \
- "vlan_list entry for vlan_id {} (AS{})").format(
- lan.get("vlan_id"), asn))
+ self.log_error(
+ _(
+ "Could not find ipv4 or 6 address in "
+ "vlan_list entry for vlan_id {} (AS{})"
+ ).format(lan.get("vlan_id"), asn)
+ )
continue
ipv4_addr = ipv4.get("address")
@@ -396,56 +418,64 @@ def parse_vlans(self, vlan_list, network, member, connection, speed):
# dropped during `process_deletions`
try:
if ipv4_addr:
- self.ipaddresses.append("{}-{}".format(
- asn, ipaddress.ip_address(unicode(ipv4_addr))))
+ self.ipaddresses.append(
+ "{}-{}".format(asn, ipaddress.ip_address(unicode(ipv4_addr)))
+ )
if ipv6_addr:
- self.ipaddresses.append("{}-{}".format(
- asn, ipaddress.ip_address(unicode(ipv6_addr))))
+ self.ipaddresses.append(
+ "{}-{}".format(asn, ipaddress.ip_address(unicode(ipv6_addr)))
+ )
except (ipaddress.AddressValueError, ValueError) as exc:
self.log_error(
- _("Ip address error '{}' in vlan_list entry for vlan_id {}"
- ).format(exc, lan.get("vlan_id")))
+ _("Ip address error '{}' in vlan_list entry for vlan_id {}").format(
+ exc, lan.get("vlan_id")
+ )
+ )
continue
netixlan_info = NetworkIXLan(
- ixlan=self.ixlan,
- network=network,
- ipaddr4=ipv4_addr,
- ipaddr6=ipv6_addr,
- speed=speed,
- asn=asn,
- is_rs_peer=(ipv4.get("routeserver", False) or \
- ipv6.get("routeserver", False))
+ ixlan=self.ixlan,
+ network=network,
+ ipaddr4=ipv4_addr,
+ ipaddr6=ipv6_addr,
+ speed=speed,
+ asn=asn,
+ is_rs_peer=(
+ ipv4.get("routeserver", False) or ipv6.get("routeserver", False)
+ ),
)
- if not self.save and (not self.ixlan.test_ipv4_address(ipv4_addr) and not \
- self.ixlan.test_ipv6_address(ipv6_addr)):
- #for the preview we don't care at all about new ip addresses
- #not at the ixlan if they dont match the prefix
+ if not self.save and (
+ not self.ixlan.test_ipv4_address(ipv4_addr)
+ and not self.ixlan.test_ipv6_address(ipv6_addr)
+ ):
+ # for the preview we don't care at all about new ip addresses
+ # not at the ixlan if they dont match the prefix
continue
-
# if connection state is inactive we won't create or update
if connection.get("state", "active") == "inactive":
- self.log_peer(asn, "noop",
- _("Connection is currently marked as inactive"),
- netixlan_info)
+ self.log_peer(
+ asn,
+ "noop",
+ _("Connection is currently marked as inactive"),
+ netixlan_info,
+ )
continue
-
# after this point we either add or modify the netixlan, so
# now is a good time to check if the related network allows
# such updates, bail if not
if not network.allow_ixp_update:
- self.log_peer(asn, "noop",
- _("Network has disabled ixp updates"),
- netixlan_info)
+ self.log_peer(
+ asn, "noop", _("Network has disabled ixp updates"), netixlan_info
+ )
continue
-
# add / modify the netixlan
- result = self.ixlan.add_netixlan(netixlan_info, save=self.save,
- save_others=self.save)
+ result = self.ixlan.add_netixlan(
+ netixlan_info, save=self.save, save_others=self.save
+ )
if result["netixlan"] and result["changed"]:
self.netixlans.append(result["netixlan"])
@@ -455,14 +485,14 @@ def parse_vlans(self, vlan_list, network, member, connection, speed):
else:
action = "modify"
reason = _("Fields changed: {}").format(
- ", ".join(result.get("changed")))
+ ", ".join(result.get("changed"))
+ )
self.log_peer(asn, action, reason, result["netixlan"])
elif result["netixlan"]:
self.log_peer(asn, "noop", _("No changes"), result["netixlan"])
elif result["log"]:
- self.log_peer(asn, "ignore", "\n".join(result["log"]),
- netixlan_info)
+ self.log_peer(asn, "ignore", "\n".join(result["log"]), netixlan_info)
def parse_speed(self, if_list):
"""
@@ -480,7 +510,8 @@ def parse_speed(self, if_list):
speed += int(iface.get("if_speed", 0))
except ValueError:
self.log_error(
- _("Invalid speed value: {}").format(iface.get("if_speed")))
+ _("Invalid speed value: {}").format(iface.get("if_speed"))
+ )
return speed
def save_log(self):
@@ -488,8 +519,8 @@ def save_log(self):
Save the attempt log
"""
IXLanIXFMemberImportAttempt.objects.update_or_create(
- ixlan=self.ixlan,
- defaults={"info": "\n".join(json.dumps(self.log))})
+ ixlan=self.ixlan, defaults={"info": "\n".join(json.dumps(self.log))}
+ )
def reset_log(self):
"""
@@ -518,26 +549,25 @@ def log_peer(self, asn, action, reason, netixlan=None):
}
if netixlan:
- peer.update({
- "net_id": netixlan.network_id,
- "ipaddr4": u"{}".format(netixlan.ipaddr4 or ""),
- "ipaddr6": u"{}".format(netixlan.ipaddr6 or ""),
- "speed": netixlan.speed,
- "is_rs_peer": netixlan.is_rs_peer,
- })
+ peer.update(
+ {
+ "net_id": netixlan.network_id,
+ "ipaddr4": u"{}".format(netixlan.ipaddr4 or ""),
+ "ipaddr6": u"{}".format(netixlan.ipaddr6 or ""),
+ "speed": netixlan.speed,
+ "is_rs_peer": netixlan.is_rs_peer,
+ }
+ )
if netixlan.id:
- self.archive_info[netixlan.id] = {"action":action, "reason":u"{}".format(reason)}
-
- self.log["data"].append({
- "peer": peer,
- "action": action,
- "reason": u"{}".format(reason),
- })
-
-
-
+ self.archive_info[netixlan.id] = {
+ "action": action,
+ "reason": u"{}".format(reason),
+ }
+ self.log["data"].append(
+ {"peer": peer, "action": action, "reason": u"{}".format(reason),}
+ )
def log_error(self, error, save=False):
"""
@@ -548,7 +578,6 @@ def log_error(self, error, save=False):
self.save_log()
-
class PostMortem(object):
"""
@@ -574,7 +603,7 @@ def reset(self, asn, **kwargs):
self.asn = asn
self.limit = kwargs.get("limit", 100)
- self.post_mortem =[]
+ self.post_mortem = []
def generate(self, asn, **kwargs):
"""
@@ -599,7 +628,6 @@ def generate(self, asn, **kwargs):
self._process_logs(limit=self.limit)
return self.post_mortem
-
def _process_logs(self, limit=100):
"""
@@ -617,12 +645,11 @@ def _process_logs(self, limit=100):
qset = IXLanIXFMemberImportLogEntry.objects.filter(netixlan__asn=self.asn)
qset = qset.exclude(action__isnull=True)
qset = qset.order_by("-log__created")
- qset = qset.select_related("log","netixlan","log__ixlan","log__ixlan__ix")
+ qset = qset.select_related("log", "netixlan", "log__ixlan", "log__ixlan__ix")
for entry in qset[:limit]:
self._process_log_entry(entry.log, entry)
-
def _process_log_entry(self, log, entry):
"""
@@ -652,19 +679,19 @@ def _process_log_entry(self, log, entry):
else:
ipaddr6 = None
-
- self.post_mortem.append({
- "ix_id": log.ixlan.ix.id,
- "ix_name": log.ixlan.ix.name,
- "ixlan_id": log.ixlan.id,
- "changes": entry.changes,
- "reason": entry.reason,
- "action": entry.action,
- "asn": data.get("asn"),
- "ipaddr4": ipaddr4,
- "ipaddr6": ipaddr6,
- "speed": data.get("speed"),
- "is_rs_peer": data.get("is_rs_peer"),
- "created": log.created.strftime("%Y-%m-%d %H:%M:%S"),
- })
-
+ self.post_mortem.append(
+ {
+ "ix_id": log.ixlan.ix.id,
+ "ix_name": log.ixlan.ix.name,
+ "ixlan_id": log.ixlan.id,
+ "changes": entry.changes,
+ "reason": entry.reason,
+ "action": entry.action,
+ "asn": data.get("asn"),
+ "ipaddr4": ipaddr4,
+ "ipaddr6": ipaddr6,
+ "speed": data.get("speed"),
+ "is_rs_peer": data.get("is_rs_peer"),
+ "created": log.created.strftime("%Y-%m-%d %H:%M:%S"),
+ }
+ )
diff --git a/peeringdb_server/mail.py b/peeringdb_server/mail.py
index 725fd83e..ec783c05 100644
--- a/peeringdb_server/mail.py
+++ b/peeringdb_server/mail.py
@@ -5,8 +5,9 @@
from django.utils.translation import ugettext_lazy as _, override
-def mail_admins_with_from(subj, msg, from_addr, fail_silently=False,
- connection=None, html_message=None):
+def mail_admins_with_from(
+ subj, msg, from_addr, fail_silently=False, connection=None, html_message=None
+):
"""
mail admins but allow specifying of from address
"""
@@ -17,8 +18,12 @@ def mail_admins_with_from(subj, msg, from_addr, fail_silently=False,
# set plain text message
msg_raw = strip_tags(msg)
mail = EmailMultiAlternatives(
- "%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subj), msg, from_addr,
- [a[1] for a in settings.ADMINS], connection=connection)
+ "%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subj),
+ msg,
+ from_addr,
+ [a[1] for a in settings.ADMINS],
+ connection=connection,
+ )
# attach html message
mail.attach_alternative(msg.replace("\n", "
\n"), "text/html")
@@ -26,8 +31,7 @@ def mail_admins_with_from(subj, msg, from_addr, fail_silently=False,
mail.send(fail_silently=fail_silently)
-def mail_users_entity_merge(users_source, users_target, entity_source,
- entity_target):
+def mail_users_entity_merge(users_source, users_target, entity_source, entity_target):
"""
notifies the users specified in users_source that their entity (entity_source) has
been merged with another entity (entity_target)
@@ -41,22 +45,29 @@ def mail_users_entity_merge(users_source, users_target, entity_source,
- entity_source : handleref object, entity that was merged
- entity_target : handleref object, entity that was merged into
"""
- msg = loader.get_template('email/notify-org-admin-merge.txt').render({
- "entity_type_name": entity_source._meta.verbose_name.capitalize(),
- "entity_source": entity_source,
- "entity_target": entity_target,
- "entity_target_url": "{}/{}/{}".format(
- settings.BASE_URL, entity_target.ref_tag, entity_target.id),
- "support_email": settings.DEFAULT_FROM_EMAIL
- })
+ msg = loader.get_template("email/notify-org-admin-merge.txt").render(
+ {
+ "entity_type_name": entity_source._meta.verbose_name.capitalize(),
+ "entity_source": entity_source,
+ "entity_target": entity_target,
+ "entity_target_url": "{}/{}/{}".format(
+ settings.BASE_URL, entity_target.ref_tag, entity_target.id
+ ),
+ "support_email": settings.DEFAULT_FROM_EMAIL,
+ }
+ )
for user in set([u for u in users_source] + [u for u in users_target]):
- #FIXME: why not have the `override` call in email_user in the first place?
+ # FIXME: why not have the `override` call in email_user in the first place?
with override(user.locale):
user.email_user(
_(u"{} Merge Notification: {} -> {}").format(
entity_source._meta.verbose_name.capitalize(),
- entity_source.name, entity_target.name), msg)
+ entity_source.name,
+ entity_target.name,
+ ),
+ msg,
+ )
def mail_username_retrieve(email, secret):
@@ -69,15 +80,17 @@ def mail_username_retrieve(email, secret):
- secret : username retrieval secret in the user's session
"""
- msg = loader.get_template('email/username-retrieve.txt').render({
- "email": email,
- "secret": secret,
- "username_retrieve_url": "{}/username-retrieve/complete?secret={}"
- .format(settings.BASE_URL, secret)
- })
+ msg = loader.get_template("email/username-retrieve.txt").render(
+ {
+ "email": email,
+ "secret": secret,
+ "username_retrieve_url": "{}/username-retrieve/complete?secret={}".format(
+ settings.BASE_URL, secret
+ ),
+ }
+ )
subject = "PeeringDB username retrieval"
- mail = EmailMultiAlternatives(subject, msg, settings.DEFAULT_FROM_EMAIL,
- [email])
+ mail = EmailMultiAlternatives(subject, msg, settings.DEFAULT_FROM_EMAIL, [email])
mail.send(fail_silently=False)
diff --git a/peeringdb_server/maintenance.py b/peeringdb_server/maintenance.py
index 83e402f6..ca725a71 100644
--- a/peeringdb_server/maintenance.py
+++ b/peeringdb_server/maintenance.py
@@ -9,6 +9,7 @@
from peeringdb_server import settings
+
def on(timeout=None):
"""
turns maintenance mode on
@@ -18,22 +19,26 @@ def on(timeout=None):
- timeout: if specified will automatically
end maintenance mode after n seconds
"""
- open(settings.MAINTENANCE_MODE_LOCKFILE, 'ab', 0).close()
+ open(settings.MAINTENANCE_MODE_LOCKFILE, "ab", 0).close()
+
def off():
""" turn maintenance mode off """
if active():
os.remove(settings.MAINTENANCE_MODE_LOCKFILE)
+
def active():
""" return True if maintenance mode is currently active """
return os.path.isfile(settings.MAINTENANCE_MODE_LOCKFILE)
+
def raise_if_active():
""" raise ActionBlocked exception if maintenance mode is active """
if active():
raise ActionBlocked()
+
class Middleware(object):
"""
@@ -45,7 +50,7 @@ def process_request(self, request):
if not active():
return None
- if request.method.lower() in ["post","put","patch","delete"]:
+ if request.method.lower() in ["post", "put", "patch", "delete"]:
view, args, kwargs = resolve(request.path_info)
@@ -54,15 +59,21 @@ def process_request(self, request):
return None
elif hasattr(view, "cls") and issubclass(view.cls, ModelViewSet):
# api response
- return JsonResponse({"meta":{"error":str(ActionBlocked())}}, status=503)
+ return JsonResponse(
+ {"meta": {"error": str(ActionBlocked())}}, status=503
+ )
else:
# other
fn, args, kwargs = resolve(reverse("maintenance"))
- return JsonResponse({"non_field_errors":[str(ActionBlocked())]}, status=503)
+ return JsonResponse(
+ {"non_field_errors": [str(ActionBlocked())]}, status=503
+ )
else:
return None
class ActionBlocked(Exception):
def __init__(self):
- super(ActionBlocked, self).__init__("The site is currently in maintenance mode, during which this action is disabled, please try again in a few minutes")
+ super(ActionBlocked, self).__init__(
+ "The site is currently in maintenance mode, during which this action is disabled, please try again in a few minutes"
+ )
diff --git a/peeringdb_server/management/commands/_db_command.py b/peeringdb_server/management/commands/_db_command.py
index 982a62b1..ea3bb350 100644
--- a/peeringdb_server/management/commands/_db_command.py
+++ b/peeringdb_server/management/commands/_db_command.py
@@ -6,9 +6,15 @@
from optparse import make_option
MODELS = [
- pdbm.Organization, pdbm.Network, pdbm.InternetExchange, pdbm.Facility,
- pdbm.NetworkContact, pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix,
- pdbm.NetworkIXLan
+ pdbm.Organization,
+ pdbm.Network,
+ pdbm.InternetExchange,
+ pdbm.Facility,
+ pdbm.NetworkContact,
+ pdbm.NetworkFacility,
+ pdbm.IXLan,
+ pdbm.IXLanPrefix,
+ pdbm.NetworkIXLan,
]
@@ -52,8 +58,9 @@ def handle(self, *args, **options):
content_type = ContentType.objects.get_for_model(model)
for id in ids:
- versions = Version.objects.filter(content_type=content_type,
- object_id_int=id)
+ versions = Version.objects.filter(
+ content_type=content_type, object_id_int=id
+ )
print "%s - %d:" % (ref_tag, id)
self.print_line()
prev = {}
@@ -61,8 +68,11 @@ def handle(self, *args, **options):
for version in versions:
data = json.loads(version.serialized_data)[0].get("fields")
n += 1
- print "VERSION: %d - %s - User: %s" % (n, data.get("updated"),
- version.revision.user)
+ print "VERSION: %d - %s - User: %s" % (
+ n,
+ data.get("updated"),
+ version.revision.user,
+ )
if not prev:
for k, v in data.items():
print "%s: '%s'" % (k, v)
diff --git a/peeringdb_server/management/commands/pdb_api_cache.py b/peeringdb_server/management/commands/pdb_api_cache.py
index f7a7a565..1af9609d 100644
--- a/peeringdb_server/management/commands/pdb_api_cache.py
+++ b/peeringdb_server/management/commands/pdb_api_cache.py
@@ -11,9 +11,15 @@
from rest_framework.test import APIRequestFactory
MODELS = [
- pdbm.Organization, pdbm.Network, pdbm.InternetExchange, pdbm.Facility,
- pdbm.NetworkContact, pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix,
- pdbm.NetworkIXLan
+ pdbm.Organization,
+ pdbm.Network,
+ pdbm.InternetExchange,
+ pdbm.Facility,
+ pdbm.NetworkContact,
+ pdbm.NetworkFacility,
+ pdbm.IXLan,
+ pdbm.IXLanPrefix,
+ pdbm.NetworkIXLan,
]
VIEWSETS = {
@@ -26,7 +32,7 @@
"ixpfx": pdbr.IXLanPrefixViewSet,
"netfac": pdbr.NetworkFacilityViewSet,
"netixlan": pdbr.NetworkIXLanViewSet,
- "poc": pdbr.NetworkContactViewSet
+ "poc": pdbr.NetworkContactViewSet,
}
settings.DEBUG = False
@@ -36,11 +42,14 @@ class Command(BaseCommand):
help = "Regen the api cache files"
def add_arguments(self, parser):
- parser.add_argument("--only", action="store", default=False,
- help="only run specified type")
parser.add_argument(
- "--date", action="store", default=None, help=
- "generate cache for objects create before or at the specified date (YYYYMMDD)"
+ "--only", action="store", default=False, help="only run specified type"
+ )
+ parser.add_argument(
+ "--date",
+ action="store",
+ default=None,
+ help="generate cache for objects create before or at the specified date (YYYYMMDD)",
)
def log(self, id, msg):
@@ -53,8 +62,8 @@ def row_datetime(self, row, field="created"):
return datetime.datetime.strptime(row.get(field), "%Y-%m-%dT%H:%M:%SZ")
def handle(self, *args, **options):
- only = options.get('only', None)
- date = options.get('date', None)
+ only = options.get("only", None)
+ date = options.get("date", None)
if only:
only = only.split(",")
@@ -65,8 +74,7 @@ def handle(self, *args, **options):
dt = datetime.datetime.now()
dtstr = dt.strftime("%Y-%m-%dT%H:%M:%SZ")
self.log_file = open(settings.API_CACHE_LOG, "w+")
- self.log("info",
- "Regnerating cache files to '%s'" % settings.API_CACHE_ROOT)
+ self.log("info", "Regnerating cache files to '%s'" % settings.API_CACHE_ROOT)
self.log("info", "Caching data for timestamp: %s" % dtstr)
rf = APIRequestFactory()
renderer = MetaJSONRenderer()
@@ -87,26 +95,26 @@ def handle(self, *args, **options):
for depth in [0, 1, 2, 3]:
self.log(tag, "generating depth %d" % depth)
if depth:
- req = rf.get('/api/%s?depth=%d&updated__lte=%s&_ctf' %
- (tag, depth, dtstr))
+ req = rf.get(
+ "/api/%s?depth=%d&updated__lte=%s&_ctf"
+ % (tag, depth, dtstr)
+ )
else:
- req = rf.get('/api/%s?updated__lte=%s&_ctf' % (tag,
- dtstr))
+ req = rf.get("/api/%s?updated__lte=%s&_ctf" % (tag, dtstr))
req.user = su
- vs = viewset.as_view({'get': 'list'})
+ vs = viewset.as_view({"get": "list"})
res = vs(req)
cache["%s-%s" % (tag, depth)] = renderer.render(
- res.data, renderer_context={
- "response": res
- })
+ res.data, renderer_context={"response": res}
+ )
del res
del vs
for id, data in cache.items():
self.log(id, "saving file")
with open(
- os.path.join(settings.API_CACHE_ROOT,
- "%s.json" % (id)), "w+") as output:
+ os.path.join(settings.API_CACHE_ROOT, "%s.json" % (id)), "w+"
+ ) as output:
output.write(data)
except Exception:
diff --git a/peeringdb_server/management/commands/pdb_api_test.py b/peeringdb_server/management/commands/pdb_api_test.py
index e390c7ad..da712140 100644
--- a/peeringdb_server/management/commands/pdb_api_test.py
+++ b/peeringdb_server/management/commands/pdb_api_test.py
@@ -13,8 +13,18 @@
from types import NoneType
-from twentyc.rpc import RestClient, PermissionDeniedException, InvalidRequestException, NotFoundException
-from django_namespace_perms.constants import PERM_READ, PERM_UPDATE, PERM_CREATE, PERM_DELETE
+from twentyc.rpc import (
+ RestClient,
+ PermissionDeniedException,
+ InvalidRequestException,
+ NotFoundException,
+)
+from django_namespace_perms.constants import (
+ PERM_READ,
+ PERM_UPDATE,
+ PERM_CREATE,
+ PERM_DELETE,
+)
from django.core.management.base import BaseCommand
from django.contrib.auth.models import Group
from django.conf import settings
@@ -23,9 +33,20 @@
from rest_framework import serializers
from peeringdb_server.models import (
- REFTAG_MAP, QUEUE_ENABLED, User, Organization, Network, InternetExchange,
- Facility, NetworkContact, NetworkIXLan, NetworkFacility, IXLan,
- IXLanPrefix, InternetExchangeFacility)
+ REFTAG_MAP,
+ QUEUE_ENABLED,
+ User,
+ Organization,
+ Network,
+ InternetExchange,
+ Facility,
+ NetworkContact,
+ NetworkIXLan,
+ NetworkFacility,
+ IXLan,
+ IXLanPrefix,
+ InternetExchangeFacility,
+)
from peeringdb_server.serializers import REFTAG_MAP as REFTAG_MAP_SLZ
from peeringdb_server import inet, settings as pdb_settings
@@ -39,7 +60,7 @@
"lte": "LessEqual",
"gt": "Greater",
"gte": "GreaterEqual",
- "": "Equal"
+ "": "Equal",
}
DATETIME = datetime.datetime.now()
@@ -49,7 +70,7 @@
DATES = {
"today": (DATE, DATE.strftime("%Y-%m-%d")),
"yesterday": (DATE_YDAY, DATE_YDAY.strftime("%Y-%m-%d")),
- "tomorrow": (DATE_TMRW, DATE_TMRW.strftime("%Y-%m-%d"))
+ "tomorrow": (DATE_TMRW, DATE_TMRW.strftime("%Y-%m-%d")),
}
# entity names
@@ -70,18 +91,9 @@
USER_ORG_MEMBER = {"user": "api_test_org_member", "password": "89c8ec05-b897"}
USER_CRUD = {
- "delete": {
- "user": "api_test_crud_delete",
- "password": "89c8ec05-b897"
- },
- "update": {
- "user": "api_test_crud_update",
- "password": "89c8ec05-b897"
- },
- "create": {
- "user": "api_test_crud_create",
- "password": "89c8ec05-b897"
- }
+ "delete": {"user": "api_test_crud_delete", "password": "89c8ec05-b897"},
+ "update": {"user": "api_test_crud_update", "password": "89c8ec05-b897"},
+ "create": {"user": "api_test_crud_create", "password": "89c8ec05-b897"},
}
# server location
@@ -136,7 +148,11 @@ class TestJSON(unittest.TestCase):
@classmethod
def get_ip6(cls, ixlan):
hosts = []
- for host in ixlan.ixpfx_set.filter(status=ixlan.status, protocol=6).first().prefix.hosts():
+ for host in (
+ ixlan.ixpfx_set.filter(status=ixlan.status, protocol=6)
+ .first()
+ .prefix.hosts()
+ ):
if len(hosts) < 100:
hosts.append(host)
else:
@@ -149,7 +165,11 @@ def get_ip6(cls, ixlan):
@classmethod
def get_ip4(cls, ixlan):
hosts = []
- for host in ixlan.ixpfx_set.filter(status=ixlan.status, protocol=4).first().prefix.hosts():
+ for host in (
+ ixlan.ixpfx_set.filter(status=ixlan.status, protocol=4)
+ .first()
+ .prefix.hosts()
+ ):
if len(hosts) < 100:
hosts.append(host)
else:
@@ -159,7 +179,6 @@ def get_ip4(cls, ixlan):
cls.IP4_COUNT += 1
return r
-
@classmethod
def get_prefix4(cls):
r = u"206.41.{}.0/24".format(cls.PREFIX_COUNT)
@@ -172,31 +191,35 @@ def get_prefix6(cls):
cls.PREFIX_COUNT += 1
return r
-
def setUp(self):
self.db_guest = self.rest_client(URL, verbose=VERBOSE)
self.db_user = self.rest_client(URL, verbose=VERBOSE, **USER)
- self.db_org_member = self.rest_client(URL, verbose=VERBOSE,
- **USER_ORG_MEMBER)
- self.db_org_admin = self.rest_client(URL, verbose=VERBOSE,
- **USER_ORG_ADMIN)
+ self.db_org_member = self.rest_client(URL, verbose=VERBOSE, **USER_ORG_MEMBER)
+ self.db_org_admin = self.rest_client(URL, verbose=VERBOSE, **USER_ORG_ADMIN)
for p, specs in USER_CRUD.items():
- setattr(self, "db_crud_%s" % p,
- self.rest_client(URL, verbose=VERBOSE, **specs))
+ setattr(
+ self, "db_crud_%s" % p, self.rest_client(URL, verbose=VERBOSE, **specs)
+ )
def all_dbs(self, exclude=[]):
return [
db
for db in [
- self.db_guest, self.db_org_member, self.db_user,
- self.db_org_admin, self.db_crud_create, self.db_crud_delete,
- self.db_crud_update
- ] if db not in exclude
+ self.db_guest,
+ self.db_org_member,
+ self.db_user,
+ self.db_org_admin,
+ self.db_crud_create,
+ self.db_crud_delete,
+ self.db_crud_update,
+ ]
+ if db not in exclude
]
def readonly_dbs(self, exclude=[]):
return [
- db for db in [self.db_guest, self.db_org_member, self.db_user]
+ db
+ for db in [self.db_guest, self.db_org_member, self.db_user]
if db not in exclude
]
@@ -213,7 +236,7 @@ def make_data_org(self, **kwargs):
"city": CITY,
"country": COUNTRY,
"state": "state",
- "zipcode": "12345"
+ "zipcode": "12345",
}
data.update(**kwargs)
return data
@@ -239,7 +262,7 @@ def make_data_ix(self, **kwargs):
"tech_email": EMAIL,
"tech_phone": PHONE,
"policy_email": EMAIL,
- "policy_phone": PHONE
+ "policy_phone": PHONE,
}
data.update(**kwargs)
return data
@@ -257,7 +280,7 @@ def make_data_fac(self, **kwargs):
"npanxx": "000-111",
"latitude": None,
"longitude": None,
- "notes": NOTE
+ "notes": NOTE,
}
data.update(**kwargs)
return data
@@ -295,7 +318,7 @@ def make_data_net(self, **kwargs):
"policy_general": "Restrictive",
"policy_locations": "Required - International",
"policy_ratio": True,
- "policy_contracts": "Required"
+ "policy_contracts": "Required",
}
data.update(**kwargs)
return data
@@ -312,7 +335,7 @@ def make_data_poc(self, **kwargs):
"name": "NOC",
"phone": PHONE,
"email": EMAIL,
- "url": WEBSITE
+ "url": WEBSITE,
}
data.update(**kwargs)
return data
@@ -328,7 +351,7 @@ def make_data_ixlan(self, **kwargs):
"mtu": 12345,
"dot1q_support": False,
"rs_asn": 12345,
- "arp_sponge": None
+ "arp_sponge": None,
}
data.update(**kwargs)
return data
@@ -340,7 +363,7 @@ def make_data_ixpfx(self, **kwargs):
data = {
"ixlan_id": SHARED["ixlan_r_ok"].id,
"protocol": "IPv4",
- "prefix": "10.%d.10.0/23" % (self.PREFIX_COUNT + 1)
+ "prefix": "10.%d.10.0/23" % (self.PREFIX_COUNT + 1),
}
if "prefix" not in kwargs:
self.PREFIX_COUNT += 1
@@ -427,9 +450,7 @@ def assert_data_integrity(self, data, typ, ignore=[]):
if k in ignore:
continue
if type(v) in [str, unicode]:
- self.assertIn(
- type(data.get(k)),
- [str, unicode], msg=msg % k)
+ self.assertIn(type(data.get(k)), [str, unicode], msg=msg % k)
elif type(v) in [int, long]:
self.assertIn(type(data.get(k)), [int, long], msg=msg % k)
else:
@@ -483,13 +504,14 @@ def assert_delete(self, db, typ, test_success=None, test_failure=None):
##########################################################################
- def assert_create(self, db, typ, data, test_failures=None,
- test_success=True, **kwargs):
+ def assert_create(
+ self, db, typ, data, test_failures=None, test_success=True, **kwargs
+ ):
if test_success:
r_data = self.assert_get_single(
- db.create(typ, data, return_response=True).get("data"))
- self.assert_existing_fields(data, r_data,
- ignore=kwargs.get("ignore"))
+ db.create(typ, data, return_response=True).get("data")
+ )
+ self.assert_existing_fields(data, r_data, ignore=kwargs.get("ignore"))
self.assertGreater(r_data.get("id"), 0)
status_checked = False
for model in QUEUE_ENABLED:
@@ -526,8 +548,7 @@ def assert_create(self, db, typ, data, test_failures=None,
with self.assertRaises(InvalidRequestException) as inst_status:
r = db.create(typ, data_status, return_response=True)
- self.assertIn("not yet been approved",
- str(inst_status.exception))
+ self.assertIn("not yet been approved", str(inst_status.exception))
# we test fail because of permissions
if "perms" in test_failures:
@@ -546,13 +567,13 @@ def assert_create_status_failure(self, db, typ, data):
"""
Wrapper for assert_create for assertion of permission failure
"""
- self.assert_create(db, typ, data, test_failures={"status": {}},
- test_success=False)
+ self.assert_create(
+ db, typ, data, test_failures={"status": {}}, test_success=False
+ )
##########################################################################
- def assert_update(self, db, typ, id, data, test_failures=False,
- test_success=True):
+ def assert_update(self, db, typ, id, data, test_failures=False, test_success=True):
if test_success:
orig = self.assert_get_handleref(db, typ, id)
@@ -612,9 +633,10 @@ def assert_update(self, db, typ, id, data, test_failures=False,
##########################################################################
- def assert_list_filter_related(self, target, rel, fld="id", valid=None,
- valid_m=None):
- #if not valid:
+ def assert_list_filter_related(
+ self, target, rel, fld="id", valid=None, valid_m=None
+ ):
+ # if not valid:
# valid = [o.id for k, o in SHARED.items() if type(
# o) != int and k.find("%s_" % target) == 0]
@@ -625,29 +647,25 @@ def assert_list_filter_related(self, target, rel, fld="id", valid=None,
ids = [
getattr(SHARED["%s_r_ok" % rel], fld),
- getattr(SHARED["%s_rw_ok" % rel], fld)
+ getattr(SHARED["%s_rw_ok" % rel], fld),
]
- kwargs_s = {
- "%s_%s" % (rel, qfld): getattr(SHARED["%s_r_ok" % rel], fld)
- }
- kwargs_m = {
- "%s_%s__in" % (rel, qfld): ",".join([str(id) for id in ids])
- }
+ kwargs_s = {"%s_%s" % (rel, qfld): getattr(SHARED["%s_r_ok" % rel], fld)}
+ kwargs_m = {"%s_%s__in" % (rel, qfld): ",".join([str(id) for id in ids])}
if hasattr(REFTAG_MAP[target], "%s" % rel):
valid_s = [
r.id
- for r in REFTAG_MAP[target].objects.filter(**kwargs_s)
+ for r in REFTAG_MAP[target]
+ .objects.filter(**kwargs_s)
.filter(status="ok")
]
valid_m = [
r.id
for r in REFTAG_MAP[target]
- .objects.filter(**{
- "%s_%s__in" % (rel, qfld): ids
- }).filter(status="ok")
+ .objects.filter(**{"%s_%s__in" % (rel, qfld): ids})
+ .filter(status="ok")
]
elif target == "poc":
@@ -655,7 +673,7 @@ def assert_list_filter_related(self, target, rel, fld="id", valid=None,
valid_m = [
SHARED["%s_r_ok_public" % target].id,
- SHARED["%s_rw_ok_public" % target].id
+ SHARED["%s_rw_ok_public" % target].id,
]
elif target == "ixpfx":
@@ -675,9 +693,7 @@ def assert_list_filter_related(self, target, rel, fld="id", valid=None,
valid_s = [SHARED["%s_r_ok" % target].id]
- valid_m = [
- SHARED["%s_r_ok" % target].id, SHARED["%s_rw_ok" % target].id
- ]
+ valid_m = [SHARED["%s_r_ok" % target].id, SHARED["%s_rw_ok" % target].id]
# exact
data = self.db_guest.all(target, **kwargs_s)
@@ -695,8 +711,16 @@ def assert_list_filter_related(self, target, rel, fld="id", valid=None,
##########################################################################
- def assert_related_depth(self, obj, serializer_class, r_depth, t_depth,
- note_tag, typ="listing", list_exclude=[]):
+ def assert_related_depth(
+ self,
+ obj,
+ serializer_class,
+ r_depth,
+ t_depth,
+ note_tag,
+ typ="listing",
+ list_exclude=[],
+ ):
"""
Assert the data indegrity of structures within a result that have
been expanded via the depth parameter
@@ -724,68 +748,84 @@ def assert_related_depth(self, obj, serializer_class, r_depth, t_depth,
if typ == "listing":
# in listing mode, depth should never expand pk relations
self.assertEqual(
- obj.get(pk_fld), None, msg="PK Relation %s %s" % (note_tag,
- pk_fld))
+ obj.get(pk_fld), None, msg="PK Relation %s %s" % (note_tag, pk_fld)
+ )
else:
# in single get mode, expand everything as long as we are at
# a relative depth greater than 1
if r_depth >= 1:
self.assert_related_depth(
- obj.get(pk_fld), REFTAG_MAP_SLZ.get(pk_fld),
- r_depth - 1, t_depth, "%s.%s" % (note_tag,
- pk_fld), typ=typ)
+ obj.get(pk_fld),
+ REFTAG_MAP_SLZ.get(pk_fld),
+ r_depth - 1,
+ t_depth,
+ "%s.%s" % (note_tag, pk_fld),
+ typ=typ,
+ )
else:
self.assertIn(
type(obj.get(pk_fld)),
[int, long, NoneType],
- msg="PK Relation %s %s" % (note_tag, pk_fld))
+ msg="PK Relation %s %s" % (note_tag, pk_fld),
+ )
# nested set relations
for n_fld, n_fld_cls in n_flds:
if r_depth > 1:
# sets should be expanded to objects
- self.assertIn(n_fld, obj,
- msg="Nested set existing (dN) %s %s" % (note_tag,
- n_fld))
+ self.assertIn(
+ n_fld, obj, msg="Nested set existing (dN) %s %s" % (note_tag, n_fld)
+ )
# make sure set exists and is of the correct type
self.assertEqual(
- type(obj[n_fld]), list,
- msg="Nested set list type (dN) %s %s" % (note_tag, n_fld))
+ type(obj[n_fld]),
+ list,
+ msg="Nested set list type (dN) %s %s" % (note_tag, n_fld),
+ )
# assert further depth expansions on all expanded objects in
# the set
for row in obj[n_fld]:
self.assert_related_depth(
- row, n_fld_cls, r_depth - 2, t_depth, "%s.%s" %
- (note_tag, n_fld), typ=typ, list_exclude=getattr(
- n_fld_cls.Meta, "list_exclude", []))
+ row,
+ n_fld_cls,
+ r_depth - 2,
+ t_depth,
+ "%s.%s" % (note_tag, n_fld),
+ typ=typ,
+ list_exclude=getattr(n_fld_cls.Meta, "list_exclude", []),
+ )
elif r_depth == 1:
# sets should be expanded to ids
- self.assertIn(n_fld, obj,
- msg="Nested set existing (d1) %s %s" % (note_tag,
- n_fld))
+ self.assertIn(
+ n_fld, obj, msg="Nested set existing (d1) %s %s" % (note_tag, n_fld)
+ )
# make sure set exists and is of the correct type
self.assertEqual(
- type(obj[n_fld]), list,
- msg="Nested set list type (d1) %s %s" % (note_tag, n_fld))
+ type(obj[n_fld]),
+ list,
+ msg="Nested set list type (d1) %s %s" % (note_tag, n_fld),
+ )
# make all values in the set are of type int or long
for row in obj[n_fld]:
self.assertIn(
type(row),
[long, int],
- msg="Nested set containing ids (d1) %s %s" % (note_tag,
- n_fld))
+ msg="Nested set containing ids (d1) %s %s" % (note_tag, n_fld),
+ )
else:
# sets should not exist
- self.assertNotIn(n_fld, obj,
- msg="Netsted set not existing (d0) %s %s" %
- (note_tag, n_fld))
+ self.assertNotIn(
+ n_fld,
+ obj,
+ msg="Netsted set not existing (d0) %s %s" % (note_tag, n_fld),
+ )
##########################################################################
# TESTS WITH USER THAT IS NOT A MEMBER OF AN ORGANIZATION
@@ -797,8 +837,7 @@ def test_user_001_GET_org(self):
##########################################################################
def test_user_001_GET_net(self):
- data = self.assert_get_handleref(self.db_user, "net",
- SHARED["net_r_ok"].id)
+ data = self.assert_get_handleref(self.db_user, "net", SHARED["net_r_ok"].id)
self.assertNotEqual(len(data.get("poc_set")), 0)
##########################################################################
@@ -809,8 +848,7 @@ def test_user_001_GET_ix(self):
##########################################################################
def test_user_001_GET_ix_net_count(self):
- data = self.assert_get_handleref(self.db_user, "ix",
- SHARED["ix_r_ok"].id)
+ data = self.assert_get_handleref(self.db_user, "ix", SHARED["ix_r_ok"].id)
self.assertEqual(data.get("net_count"), 1)
##########################################################################
@@ -821,57 +859,48 @@ def test_user_001_GET_fac(self):
##########################################################################
def test_user_001_GET_fac_netcount(self):
- data = self.assert_get_handleref(self.db_user, "fac",
- SHARED["fac_r_ok"].id)
+ data = self.assert_get_handleref(self.db_user, "fac", SHARED["fac_r_ok"].id)
self.assertEqual(data.get("net_count"), 1)
##########################################################################
def test_user_001_GET_poc_public(self):
- self.assert_get_handleref(self.db_user, "poc",
- SHARED["poc_r_ok_public"].id)
+ self.assert_get_handleref(self.db_user, "poc", SHARED["poc_r_ok_public"].id)
##########################################################################
def test_user_001_GET_poc_users(self):
- self.assert_get_handleref(self.db_user, "poc",
- SHARED["poc_r_ok_users"].id)
+ self.assert_get_handleref(self.db_user, "poc", SHARED["poc_r_ok_users"].id)
##########################################################################
def test_user_001_GET_poc_private(self):
- self.assert_get_forbidden(self.db_user, "poc",
- SHARED["poc_r_ok_private"].id)
+ self.assert_get_forbidden(self.db_user, "poc", SHARED["poc_r_ok_private"].id)
##########################################################################
def test_user_001_GET_nefac(self):
- self.assert_get_handleref(self.db_user, "netfac",
- SHARED["netfac_r_ok"].id)
+ self.assert_get_handleref(self.db_user, "netfac", SHARED["netfac_r_ok"].id)
##########################################################################
def test_user_001_GET_netixlan(self):
- self.assert_get_handleref(self.db_user, "netixlan",
- SHARED["netixlan_r_ok"].id)
+ self.assert_get_handleref(self.db_user, "netixlan", SHARED["netixlan_r_ok"].id)
##########################################################################
def test_user_001_GET_ixfac(self):
- self.assert_get_handleref(self.db_user, "ixfac",
- SHARED["ixfac_r_ok"].id)
+ self.assert_get_handleref(self.db_user, "ixfac", SHARED["ixfac_r_ok"].id)
##########################################################################
def test_user_001_GET_ixlan(self):
- self.assert_get_handleref(self.db_user, "ixlan",
- SHARED["ixlan_r_ok"].id)
+ self.assert_get_handleref(self.db_user, "ixlan", SHARED["ixlan_r_ok"].id)
##########################################################################
def test_user_001_GET_ixpfx(self):
- self.assert_get_handleref(self.db_user, "ixpfx",
- SHARED["ixpfx_r_ok"].id)
+ self.assert_get_handleref(self.db_user, "ixpfx", SHARED["ixpfx_r_ok"].id)
##########################################################################
@@ -887,30 +916,32 @@ def test_user_005_list_poc(self):
def test_user_001_GET_as_set(self):
data = self.db_guest.all("as_set")
networks = Network.objects.filter(status="ok")
- print(data)
+ print (data)
for net in networks:
self.assertEqual(data[0].get(u"{}".format(net.asn)), net.irr_as_set)
-
##########################################################################
# TESTS WITH USER THAT IS ORGANIZATION MEMBER
##########################################################################
def test_org_member_001_GET_poc_public(self):
- self.assert_get_handleref(self.db_org_member, "poc",
- SHARED["poc_r_ok_public"].id)
+ self.assert_get_handleref(
+ self.db_org_member, "poc", SHARED["poc_r_ok_public"].id
+ )
##########################################################################
def test_org_member_001_GET_poc_users(self):
- self.assert_get_handleref(self.db_org_member, "poc",
- SHARED["poc_r_ok_users"].id)
+ self.assert_get_handleref(
+ self.db_org_member, "poc", SHARED["poc_r_ok_users"].id
+ )
##########################################################################
def test_org_member_001_GET_poc_private(self):
- self.assert_get_handleref(self.db_org_member, "poc",
- SHARED["poc_r_ok_private"].id)
+ self.assert_get_handleref(
+ self.db_org_member, "poc", SHARED["poc_r_ok_private"].id
+ )
##########################################################################
# TESTS WITH USER THAT IS ORGANIZATION ADMINISTRATOR
@@ -919,22 +950,23 @@ def test_org_member_001_GET_poc_private(self):
##########################################################################
def test_org_admin_001_GET_poc_public(self):
- self.assert_get_handleref(self.db_org_admin, "poc",
- SHARED["poc_r_ok_public"].id)
+ self.assert_get_handleref(
+ self.db_org_admin, "poc", SHARED["poc_r_ok_public"].id
+ )
##########################################################################
def test_org_admin_001_GET_poc_users(self):
- self.assert_get_handleref(self.db_org_admin, "poc",
- SHARED["poc_r_ok_users"].id)
+ self.assert_get_handleref(self.db_org_admin, "poc", SHARED["poc_r_ok_users"].id)
##########################################################################
def test_org_admin_001_GET_poc_private(self):
# org admin is admin of rw org, so trying to access the private poc of the r org
# should still be forbidden
- self.assert_get_forbidden(self.db_org_admin, "poc",
- SHARED["poc_r_ok_private"].id)
+ self.assert_get_forbidden(
+ self.db_org_admin, "poc", SHARED["poc_r_ok_private"].id
+ )
##########################################################################
@@ -948,63 +980,69 @@ def test_org_admin_002_POST_PUT_DELETE_ix(self):
data,
ignore=["prefix"],
test_failures={
- "invalid": {
- "prefix": self.get_prefix4(),
- "name": ""
- },
+ "invalid": {"prefix": self.get_prefix4(), "name": ""},
"perms": {
"prefix": self.get_prefix4(),
# need to set name again so it doesnt fail unique validation
"name": self.make_name("Test"),
# set org to an organization the user doesnt have perms to
- "org_id": SHARED["org_r_ok"].id
+ "org_id": SHARED["org_r_ok"].id,
},
"status": {
# need to set name again so it doesnt fail unique validation
"prefix": self.get_prefix4(),
"name": self.make_name("Test"),
- "org_id": SHARED["org_rwp"].id
- }
- })
+ "org_id": SHARED["org_rwp"].id,
+ },
+ },
+ )
SHARED["ix_id"] = r_data.get("id")
- self.assert_update(self.db_org_admin, "ix", SHARED["ix_id"],
- {"name": self.make_name("Test")}, test_failures={
- "invalid": {
- "name": ""
- },
- "perms": {
- "id": SHARED["ix_r_ok"].id
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "ix",
+ SHARED["ix_id"],
+ {"name": self.make_name("Test")},
+ test_failures={
+ "invalid": {"name": ""},
+ "perms": {"id": SHARED["ix_r_ok"].id},
+ },
+ )
- self.assert_delete(self.db_org_admin, "ix",
- test_success=SHARED["ix_id"],
- test_failure=SHARED["ix_r_ok"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "ix",
+ test_success=SHARED["ix_id"],
+ test_failure=SHARED["ix_r_ok"].id,
+ )
self.assert_create(
- self.db_org_admin, "ix", data, test_success=False, test_failures={
+ self.db_org_admin,
+ "ix",
+ data,
+ test_success=False,
+ test_failures={
"invalid": {
"prefix": self.get_prefix4(),
"policy_email": "",
- "tech_email": ""
+ "tech_email": "",
},
- })
+ },
+ )
- self.assert_create(self.db_org_admin, "ix", data, test_success=False,
- test_failures={
- "invalid": {
- "prefix": ""
- },
- })
+ self.assert_create(
+ self.db_org_admin,
+ "ix",
+ data,
+ test_success=False,
+            test_failures={"invalid": {"prefix": ""}},
+ )
# test ix creation with a ipv6 prefix
data = self.make_data_ix(prefix=self.get_prefix6())
self.assert_create(self.db_org_admin, "ix", data, ignore=["prefix"])
-
-
##########################################################################
def test_org_admin_002_POST_PUT_DELETE_fac(self):
@@ -1015,20 +1053,19 @@ def test_org_admin_002_POST_PUT_DELETE_fac(self):
"fac",
data,
test_failures={
- "invalid": {
- "name": ""
- },
+ "invalid": {"name": ""},
"perms": {
# need to set name again so it doesnt fail unique validation
"name": self.make_name("Test"),
# set org to an organization the user doesnt have perms to
- "org_id": SHARED["org_r_ok"].id
+ "org_id": SHARED["org_r_ok"].id,
},
"status": {
"name": self.make_name("Test"),
- "org_id": SHARED["org_rwp"].id
- }
- })
+ "org_id": SHARED["org_rwp"].id,
+ },
+ },
+ )
SHARED["fac_id"] = r_data.get("id")
@@ -1038,22 +1075,21 @@ def test_org_admin_002_POST_PUT_DELETE_fac(self):
SHARED["fac_id"],
{"name": self.make_name("Test")},
test_failures={
- "invalid": {
- "name": ""
- },
- "perms": {
- "id": SHARED["fac_r_ok"].id
- },
+ "invalid": {"name": ""},
+ "perms": {"id": SHARED["fac_r_ok"].id},
"readonly": {
- "latitude": 1, #this should not take as it is read only
- "longitude": 1 #this should not take as it is read only
- }
+ "latitude": 1, # this should not take as it is read only
+ "longitude": 1, # this should not take as it is read only
+ },
},
)
- self.assert_delete(self.db_org_admin, "fac",
- test_success=SHARED["fac_id"],
- test_failure=SHARED["fac_r_ok"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "fac",
+ test_success=SHARED["fac_id"],
+ test_failure=SHARED["fac_r_ok"].id,
+ )
##########################################################################
@@ -1065,46 +1101,51 @@ def test_org_admin_002_POST_PUT_DELETE_net(self):
"net",
data,
test_failures={
- "invalid": {
- "name": ""
- },
+ "invalid": {"name": ""},
"perms": {
# need to set name again so it doesnt fail unique validation
"name": self.make_name("Test"),
"asn": data["asn"] + 1,
# set org to an organization the user doesnt have perms to
- "org_id": SHARED["org_r_ok"].id
+ "org_id": SHARED["org_r_ok"].id,
},
"status": {
"org_id": SHARED["org_rwp"].id,
"asn": data["asn"] + 1,
- "name": self.make_name("Test")
- }
- })
+ "name": self.make_name("Test"),
+ },
+ },
+ )
SHARED["net_id"] = r_data.get("id")
- self.assert_update(self.db_org_admin, "net", SHARED["net_id"],
- {"name": self.make_name("Test")}, test_failures={
- "invalid": {
- "name": ""
- },
- "perms": {
- "id": SHARED["net_r_ok"].id
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "net",
+ SHARED["net_id"],
+ {"name": self.make_name("Test")},
+ test_failures={
+ "invalid": {"name": ""},
+ "perms": {"id": SHARED["net_r_ok"].id},
+ },
+ )
- self.assert_delete(self.db_org_admin, "net",
- test_success=SHARED["net_id"],
- test_failure=SHARED["net_r_ok"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "net",
+ test_success=SHARED["net_id"],
+ test_failure=SHARED["net_r_ok"].id,
+ )
# Test RiR not found failure
r_data = self.assert_create(
- self.db_org_admin, "net", data,
- test_failures={"invalid": {
- "asn": 9999999
- }}, test_success=False)
+ self.db_org_admin,
+ "net",
+ data,
+ test_failures={"invalid": {"asn": 9999999}},
+ test_success=False,
+ )
##########################################################################
def test_org_admin_002_POST_net_deleted(self):
@@ -1113,8 +1154,10 @@ def test_org_admin_002_POST_net_deleted(self):
with self.assertRaises(InvalidRequestException) as exc:
r_data = self.db_org_admin.create("net", data, return_response=True)
- assert exc.exception.extra["asn"].find("Network has been deleted. Please contact") == 0
-
+ assert (
+ exc.exception.extra["asn"].find("Network has been deleted. Please contact")
+ == 0
+ )
##########################################################################
@@ -1127,11 +1170,11 @@ def test_org_admin_002_POST_PUT_DELETE_as_set(self):
data = self.make_data_net(asn=9000900)
with self.assertRaises(Exception) as exc:
- r_data = self.assert_create(self.db_org_admin,"as_set",data)
+ r_data = self.assert_create(self.db_org_admin, "as_set", data)
self.assertIn("You do not have permission", str(exc.exception))
with self.assertRaises(Exception) as exc:
- self.db_org_admin.update("as_set", {"9000900":"AS-XXX"})
+ self.db_org_admin.update("as_set", {"9000900": "AS-XXX"})
self.assertIn("You do not have permission", str(exc.exception))
with self.assertRaises(Exception) as exc:
@@ -1147,10 +1190,12 @@ def test_org_admin_002_POST_net_bogon_asn(self):
data = self.make_data_net()
for bogon_asn in inet.BOGON_ASN_RANGES:
r_data = self.assert_create(
- self.db_org_admin, "net", data,
- test_failures={"invalid": {
- "asn": bogon_asn[0]
- }}, test_success=False)
+ self.db_org_admin,
+ "net",
+ data,
+ test_failures={"invalid": {"asn": bogon_asn[0]}},
+ test_success=False,
+ )
# server running in tutorial mode should be allowed
# to create networks with bogon asns, so we test that
@@ -1174,14 +1219,18 @@ def test_org_admin_002_PUT_net_write_only_fields(self):
"""
def test_write_only_fields_missing(orig, updated):
- assert (updated.has_key("allow_ixp_update") == False)
+ assert updated.has_key("allow_ixp_update") == False
net = SHARED["net_rw_ok"]
self.assertEqual(net.allow_ixp_update, False)
- self.assert_update(self.db_org_admin, "net", net.id,
- {"allow_ixp_update": True},
- test_success=[test_write_only_fields_missing])
+ self.assert_update(
+ self.db_org_admin,
+ "net",
+ net.id,
+ {"allow_ixp_update": True},
+ test_success=[test_write_only_fields_missing],
+ )
net.refresh_from_db()
self.assertEqual(net.allow_ixp_update, True)
@@ -1193,7 +1242,7 @@ def test_org_admin_002_POST_PUT_DELETE_netfac(self):
data = {
"net_id": SHARED["net_rw_ok"].id,
"fac_id": SHARED["fac_rw_ok"].id,
- "local_asn": 12345
+ "local_asn": 12345,
}
r_data = self.assert_create(
@@ -1201,9 +1250,7 @@ def test_org_admin_002_POST_PUT_DELETE_netfac(self):
"netfac",
data,
test_failures={
- "invalid": {
- "net_id": ""
- },
+ "invalid": {"net_id": ""},
"perms": {
# set network to one the user doesnt have perms to
"net_id": SHARED["net_r_ok"].id
@@ -1211,31 +1258,36 @@ def test_org_admin_002_POST_PUT_DELETE_netfac(self):
"status": {
"net_id": SHARED["net_rw_pending"].id,
"fac_id": SHARED["fac_rw_pending"].id,
- }
- })
+ },
+ },
+ )
SHARED["netfac_id"] = r_data.get("id")
- self.assert_update(self.db_org_admin, "netfac", SHARED["netfac_id"],
- {"local_asn": random.randint(999, 9999)},
- test_failures={
- "invalid": {
- "fac_id": ""
- },
- "perms": {
- "net_id": SHARED["net_r_ok"].id
- }
- })
-
- self.assert_delete(self.db_org_admin, "netfac",
- test_success=SHARED["netfac_id"],
- test_failure=SHARED["netfac_r_ok"].id)
+ self.assert_update(
+ self.db_org_admin,
+ "netfac",
+ SHARED["netfac_id"],
+ {"local_asn": random.randint(999, 9999)},
+ test_failures={
+ "invalid": {"fac_id": ""},
+ "perms": {"net_id": SHARED["net_r_ok"].id},
+ },
+ )
+
+ self.assert_delete(
+ self.db_org_admin,
+ "netfac",
+ test_success=SHARED["netfac_id"],
+ test_failure=SHARED["netfac_r_ok"].id,
+ )
# re-create deleted netfac
r_data = self.assert_create(self.db_org_admin, "netfac", data)
# re-delete
- self.assert_delete(self.db_org_admin, "netfac",
- test_success=SHARED["netfac_id"])
+ self.assert_delete(
+ self.db_org_admin, "netfac", test_success=SHARED["netfac_id"]
+ )
##########################################################################
@@ -1247,33 +1299,34 @@ def test_org_admin_002_POST_PUT_DELETE_poc(self):
"poc",
data,
test_failures={
- "invalid": {
- "net_id": ""
- },
+ "invalid": {"net_id": ""},
"perms": {
# set network to one the user doesnt have perms to
"net_id": SHARED["net_r_ok"].id
},
- "status": {
- "net_id": SHARED["net_rw_pending"].id
- }
- })
+ "status": {"net_id": SHARED["net_rw_pending"].id},
+ },
+ )
SHARED["poc_id"] = r_data.get("id")
- self.assert_update(self.db_org_admin, "poc", SHARED["poc_id"],
- {"role": "Sales"}, test_failures={
- "invalid": {
- "role": "NOPE"
- },
- "perms": {
- "net_id": SHARED["net_r_ok"].id
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "poc",
+ SHARED["poc_id"],
+ {"role": "Sales"},
+ test_failures={
+ "invalid": {"role": "NOPE"},
+ "perms": {"net_id": SHARED["net_r_ok"].id},
+ },
+ )
- self.assert_delete(self.db_org_admin, "poc",
- test_success=SHARED["poc_id"],
- test_failure=SHARED["poc_r_ok_users"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "poc",
+ test_success=SHARED["poc_id"],
+ test_failure=SHARED["poc_r_ok_users"].id,
+ )
##########################################################################
@@ -1281,226 +1334,249 @@ def test_org_admin_002_POST_PUT_DELETE_ixlan(self):
data = self.make_data_ixlan(ix_id=SHARED["ix_rw_ok"].id)
r_data = self.assert_create(
- self.db_org_admin, "ixlan", data, test_failures={
- "invalid": {
- "ix_id": ""
- },
- "perms": {
- "ix_id": SHARED["ix_r_ok"].id
- },
- "status": {
- "ix_id": SHARED["ix_rw_pending"].id
- }
- })
+ self.db_org_admin,
+ "ixlan",
+ data,
+ test_failures={
+ "invalid": {"ix_id": ""},
+ "perms": {"ix_id": SHARED["ix_r_ok"].id},
+ "status": {"ix_id": SHARED["ix_rw_pending"].id},
+ },
+ )
SHARED["ixlan_id"] = r_data["id"]
- self.assert_update(self.db_org_admin, "ixlan", SHARED["ixlan_id"],
- {"name": self.make_name("Test")}, test_failures={
- "invalid": {
- "mtu": "NEEDS TO BE INT"
- },
- "perms": {
- "ix_id": SHARED["ix_r_ok"].id
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "ixlan",
+ SHARED["ixlan_id"],
+ {"name": self.make_name("Test")},
+ test_failures={
+ "invalid": {"mtu": "NEEDS TO BE INT"},
+ "perms": {"ix_id": SHARED["ix_r_ok"].id},
+ },
+ )
- self.assert_delete(self.db_org_admin, "ixlan",
- test_success=SHARED["ixlan_id"],
- test_failure=SHARED["ixlan_r_ok"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "ixlan",
+ test_success=SHARED["ixlan_id"],
+ test_failure=SHARED["ixlan_r_ok"].id,
+ )
##########################################################################
def test_org_admin_002_POST_PUT_DELETE_ixpfx(self):
- data = self.make_data_ixpfx(ixlan_id=SHARED["ixlan_rw_ok"].id,
- prefix="206.126.236.0/25")
+ data = self.make_data_ixpfx(
+ ixlan_id=SHARED["ixlan_rw_ok"].id, prefix="206.126.236.0/25"
+ )
r_data = self.assert_create(
- self.db_org_admin, "ixpfx", data, test_failures={
- "invalid": {
- "prefix": "127.0.0.0/8"
- },
+ self.db_org_admin,
+ "ixpfx",
+ data,
+ test_failures={
+ "invalid": {"prefix": "127.0.0.0/8"},
"perms": {
"prefix": "205.127.237.0/24",
- "ixlan_id": SHARED["ixlan_r_ok"].id
+ "ixlan_id": SHARED["ixlan_r_ok"].id,
},
"status": {
"prefix": "205.127.237.0/24",
- "ixlan_id": SHARED["ixlan_rw_pending"].id
- }
- })
+ "ixlan_id": SHARED["ixlan_rw_pending"].id,
+ },
+ },
+ )
SHARED["ixpfx_id"] = r_data["id"]
- #self.assert_create(self.db_org_admin, "ixpfx", data, test_failures={
+ # self.assert_create(self.db_org_admin, "ixpfx", data, test_failures={
# "invalid": {
# "prefix": "206.126.236.0/25"
# },
- #}, test_success=False)
-
- self.assert_update(self.db_org_admin, "ixpfx", SHARED["ixpfx_id"],
- {"prefix": "206.127.236.0/26"}, test_failures={
- "invalid": {
- "prefix": "NEEDS TO BE VALID PREFIX"
- },
- "perms": {
- "ixlan_id": SHARED["ixlan_r_ok"].id
- }
- })
-
- self.assert_delete(self.db_org_admin, "ixpfx",
- test_success=SHARED["ixpfx_id"],
- test_failure=SHARED["ixpfx_r_ok"].id)
+ # }, test_success=False)
+
+ self.assert_update(
+ self.db_org_admin,
+ "ixpfx",
+ SHARED["ixpfx_id"],
+ {"prefix": "206.127.236.0/26"},
+ test_failures={
+ "invalid": {"prefix": "NEEDS TO BE VALID PREFIX"},
+ "perms": {"ixlan_id": SHARED["ixlan_r_ok"].id},
+ },
+ )
+
+ self.assert_delete(
+ self.db_org_admin,
+ "ixpfx",
+ test_success=SHARED["ixpfx_id"],
+ test_failure=SHARED["ixpfx_r_ok"].id,
+ )
# re-create deleted ixpfx
r_data = self.assert_create(self.db_org_admin, "ixpfx", data)
# re-delete
- self.assert_delete(self.db_org_admin, "ixpfx",
- test_success=SHARED["ixpfx_id"])
+ self.assert_delete(self.db_org_admin, "ixpfx", test_success=SHARED["ixpfx_id"])
# re-creating a deleted ixpfx that we dont have write permissions do
# should fail
- pfx = IXLanPrefix.objects.create(ixlan=SHARED["ixlan_r_ok"],
- prefix=u"205.127.237.0/24",
- protocol="IPv4")
+ pfx = IXLanPrefix.objects.create(
+ ixlan=SHARED["ixlan_r_ok"], prefix=u"205.127.237.0/24", protocol="IPv4"
+ )
pfx.delete()
data.update(prefix="205.127.237.0/24")
- r_data = self.assert_create(self.db_org_admin, "ixpfx", data,
- test_failures={"invalid": {
- }}, test_success=False)
+ r_data = self.assert_create(
+ self.db_org_admin,
+ "ixpfx",
+ data,
+ test_failures={"invalid": {}},
+ test_success=False,
+ )
# make sure protocols are validated
- r_data = self.assert_create(self.db_org_admin, "ixpfx", data,
- test_failures={
- "invalid": {
- "prefix": "207.128.238.0/24",
- "protocol": "IPv6"
- },
- }, test_success=False)
+ r_data = self.assert_create(
+ self.db_org_admin,
+ "ixpfx",
+ data,
+ test_failures={
+ "invalid": {"prefix": "207.128.238.0/24", "protocol": "IPv6"},
+ },
+ test_success=False,
+ )
##########################################################################
def test_org_admin_002_POST_PUT_DELETE_netixlan(self):
- data = self.make_data_netixlan(net_id=SHARED["net_rw_ok"].id,
- ixlan_id=SHARED["ixlan_rw_ok"].id)
+ data = self.make_data_netixlan(
+ net_id=SHARED["net_rw_ok"].id, ixlan_id=SHARED["ixlan_rw_ok"].id
+ )
r_data = self.assert_create(
self.db_org_admin,
"netixlan",
data,
test_failures={
- "invalid": {
- "ipaddr4": u"a b c"
- },
+ "invalid": {"ipaddr4": u"a b c"},
"perms": {
# set network to one the user doesnt have perms to
"ipaddr4": self.get_ip4(SHARED["ixlan_rw_ok"]),
"ipaddr6": self.get_ip6(SHARED["ixlan_rw_ok"]),
- "net_id": SHARED["net_r_ok"].id
- }
- })
-
+ "net_id": SHARED["net_r_ok"].id,
+ },
+ },
+ )
SHARED["netixlan_id"] = r_data.get("id")
- self.assert_update(self.db_org_admin, "netixlan",
- SHARED["netixlan_id"], {"speed": 2000},
- test_failures={
- "invalid": {
- "ipaddr4": "NEEDS TO BE VALID IP"
- },
- "perms": {
- "net_id": SHARED["net_r_ok"].id
- }
- })
-
- self.assert_delete(self.db_org_admin, "netixlan",
- test_success=SHARED["netixlan_id"],
- test_failure=SHARED["netixlan_r_ok"].id)
+ self.assert_update(
+ self.db_org_admin,
+ "netixlan",
+ SHARED["netixlan_id"],
+ {"speed": 2000},
+ test_failures={
+ "invalid": {"ipaddr4": "NEEDS TO BE VALID IP"},
+ "perms": {"net_id": SHARED["net_r_ok"].id},
+ },
+ )
+ self.assert_delete(
+ self.db_org_admin,
+ "netixlan",
+ test_success=SHARED["netixlan_id"],
+ test_failure=SHARED["netixlan_r_ok"].id,
+ )
##########################################################################
def test_org_admin_002_POST_PUT_netixlan_validation(self):
- data = self.make_data_netixlan(net_id=SHARED["net_rw_ok"].id,
- ixlan_id=SHARED["ixlan_rw_ok"].id)
+ data = self.make_data_netixlan(
+ net_id=SHARED["net_rw_ok"].id, ixlan_id=SHARED["ixlan_rw_ok"].id
+ )
test_failures = [
# test failure if ip4 not in prefix
- {"invalid": { "ipaddr4": self.get_ip4(SHARED["ixlan_r_ok"]) }},
+ {"invalid": {"ipaddr4": self.get_ip4(SHARED["ixlan_r_ok"])}},
# test failure if ip6 not in prefix
- {"invalid": { "ipaddr6": self.get_ip6(SHARED["ixlan_r_ok"]) }},
+ {"invalid": {"ipaddr6": self.get_ip6(SHARED["ixlan_r_ok"])}},
]
for test_failure in test_failures:
- self.assert_create(self.db_org_admin, "netixlan", data,
- test_failures=test_failure, test_success=False)
-
-
+ self.assert_create(
+ self.db_org_admin,
+ "netixlan",
+ data,
+ test_failures=test_failure,
+ test_success=False,
+ )
##########################################################################
def test_org_admin_002_POST_PUT_DELETE_ixfac(self):
- data = {
- "fac_id": SHARED["fac_rw2_ok"].id,
- "ix_id": SHARED["ix_rw2_ok"].id
- }
+ data = {"fac_id": SHARED["fac_rw2_ok"].id, "ix_id": SHARED["ix_rw2_ok"].id}
r_data = self.assert_create(
self.db_org_admin,
"ixfac",
data,
test_failures={
- "invalid": {
- "ix_id": ""
- },
+ "invalid": {"ix_id": ""},
"perms": {
# set network to one the user doesnt have perms to
"ix_id": SHARED["ix_r_ok"].id
},
"status": {
"fac_id": SHARED["fac_rw2_pending"].id,
- "ix_id": SHARED["ix_rw2_pending"].id
- }
- })
+ "ix_id": SHARED["ix_rw2_pending"].id,
+ },
+ },
+ )
SHARED["ixfac_id"] = r_data.get("id")
- self.assert_update(self.db_org_admin, "ixfac", SHARED["ixfac_id"],
- {"fac_id": SHARED["fac_r2_ok"].id}, test_failures={
- "invalid": {
- "fac_id": ""
- },
- "perms": {
- "ix_id": SHARED["ix_r_ok"].id
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "ixfac",
+ SHARED["ixfac_id"],
+ {"fac_id": SHARED["fac_r2_ok"].id},
+ test_failures={
+ "invalid": {"fac_id": ""},
+ "perms": {"ix_id": SHARED["ix_r_ok"].id},
+ },
+ )
- self.assert_delete(self.db_org_admin, "ixfac",
- test_success=SHARED["ixfac_id"],
- test_failure=SHARED["ixfac_r_ok"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "ixfac",
+ test_success=SHARED["ixfac_id"],
+ test_failure=SHARED["ixfac_r_ok"].id,
+ )
##########################################################################
def test_org_admin_003_PUT_org(self):
- self.assert_update(self.db_org_admin, "org", SHARED["org_rw_ok"].id,
- {"name": self.make_name("Test")}, test_failures={
- "invalid": {
- "name": ""
- },
- "perms": {
- "id": SHARED["org_r_ok"].id
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "org",
+ SHARED["org_rw_ok"].id,
+ {"name": self.make_name("Test")},
+ test_failures={
+ "invalid": {"name": ""},
+ "perms": {"id": SHARED["org_r_ok"].id},
+ },
+ )
##########################################################################
def test_zz_org_admin_004_DELETE_org(self):
- self.assert_delete(self.db_org_admin, "org",
- test_success=SHARED["org_rw_ok"].id,
- test_failure=SHARED["org_r_ok"].id)
+ self.assert_delete(
+ self.db_org_admin,
+ "org",
+ test_success=SHARED["org_rw_ok"].id,
+ test_failure=SHARED["org_r_ok"].id,
+ )
##########################################################################
# GUEST TESTS
@@ -1512,8 +1588,7 @@ def test_guest_001_GET_org(self):
##########################################################################
def test_guest_001_GET_net(self):
- data = self.assert_get_handleref(self.db_guest, "net",
- SHARED["net_r_ok"].id)
+ data = self.assert_get_handleref(self.db_guest, "net", SHARED["net_r_ok"].id)
for poc in data.get("poc_set"):
self.assertEqual(poc["visible"], "Public")
@@ -1527,8 +1602,9 @@ def __test_guest_001_GET_asn(self):
self.assert_get_handleref(self.db_guest, "asn", SHARED["net_r_ok"].asn)
with self.assertRaises(InvalidRequestException) as inst:
- self.assert_get_handleref(self.db_guest, "asn",
- "%s[" % SHARED["net_r_ok"].asn)
+ self.assert_get_handleref(
+ self.db_guest, "asn", "%s[" % SHARED["net_r_ok"].asn
+ )
##########################################################################
@@ -1543,50 +1619,42 @@ def test_guest_001_GET_fac(self):
##########################################################################
def test_guest_001_GET_poc_private(self):
- self.assert_get_forbidden(self.db_guest, "poc",
- SHARED["poc_r_ok_private"].id)
+ self.assert_get_forbidden(self.db_guest, "poc", SHARED["poc_r_ok_private"].id)
##########################################################################
def test_guest_001_GET_poc_users(self):
- self.assert_get_forbidden(self.db_guest, "poc",
- SHARED["poc_r_ok_users"].id)
+ self.assert_get_forbidden(self.db_guest, "poc", SHARED["poc_r_ok_users"].id)
##########################################################################
def test_guest_001_GET_poc_public(self):
- self.assert_get_handleref(self.db_guest, "poc",
- SHARED["poc_r_ok_public"].id)
+ self.assert_get_handleref(self.db_guest, "poc", SHARED["poc_r_ok_public"].id)
##########################################################################
def test_guest_001_GET_nefac(self):
- self.assert_get_handleref(self.db_guest, "netfac",
- SHARED["netfac_r_ok"].id)
+ self.assert_get_handleref(self.db_guest, "netfac", SHARED["netfac_r_ok"].id)
##########################################################################
def test_guest_001_GET_netixlan(self):
- self.assert_get_handleref(self.db_guest, "netixlan",
- SHARED["netixlan_r_ok"].id)
+ self.assert_get_handleref(self.db_guest, "netixlan", SHARED["netixlan_r_ok"].id)
##########################################################################
def test_guest_001_GET_ixfac(self):
- self.assert_get_handleref(self.db_guest, "ixfac",
- SHARED["ixfac_r_ok"].id)
+ self.assert_get_handleref(self.db_guest, "ixfac", SHARED["ixfac_r_ok"].id)
##########################################################################
def test_guest_001_GET_ixlan(self):
- self.assert_get_handleref(self.db_guest, "ixlan",
- SHARED["ixlan_r_ok"].id)
+ self.assert_get_handleref(self.db_guest, "ixlan", SHARED["ixlan_r_ok"].id)
##########################################################################
def test_guest_001_GET_ixpfx(self):
- self.assert_get_handleref(self.db_guest, "ixpfx",
- SHARED["ixpfx_r_ok"].id)
+ self.assert_get_handleref(self.db_guest, "ixpfx", SHARED["ixpfx_r_ok"].id)
##########################################################################
@@ -1642,8 +1710,7 @@ def test_org_admin_005_list(self):
##########################################################################
def test_guest_005_fields_filter(self):
- data = self.db_guest.all("org", limit=10, fields=",".join(
- ["name", "status"]))
+ data = self.db_guest.all("org", limit=10, fields=",".join(["name", "status"]))
self.assertGreater(len(data), 0)
for row in data:
self.assertEqual(sorted(row.keys()), sorted([u"name", u"status"]))
@@ -1673,8 +1740,9 @@ def test_guest_005_list_pagination(self):
##########################################################################
def test_guest_005_list_since(self):
- data = self.db_guest.all("net", since=int(START_TIMESTAMP) - 10,
- status="deleted")
+ data = self.db_guest.all(
+ "net", since=int(START_TIMESTAMP) - 10, status="deleted"
+ )
self.assertEqual(len(data), 2)
self.assert_handleref_integrity(data[0])
self.assert_data_integrity(data[0], "net")
@@ -1700,8 +1768,9 @@ def test_guest_005_get_depth_all(self):
pk_flds, n_flds = self.serializer_related_fields(slz)
obj = data[0]
- self.assert_related_depth(obj, slz, depth, depth, note_tag,
- typ="single")
+ self.assert_related_depth(
+ obj, slz, depth, depth, note_tag, typ="single"
+ )
##########################################################################
@@ -1724,8 +1793,9 @@ def test_guest_005_list_depth_all(self):
pk_flds, n_flds = self.serializer_related_fields(slz)
obj = data[0]
- self.assert_related_depth(obj, slz, depth, depth, note_tag,
- typ="listing")
+ self.assert_related_depth(
+ obj, slz, depth, depth, note_tag, typ="listing"
+ )
##########################################################################
@@ -1789,19 +1859,24 @@ def test_guest_005_list_filter_dates_numeric(self):
kwargs = {fld: DATE[1]}
data = self.db_guest.all("fac", limit=10, **kwargs)
self.assertGreater(
- len(data), 0, msg="%s_%s - data length assertion" % (fld,
- flt))
+ len(data), 0, msg="%s_%s - data length assertion" % (fld, flt)
+ )
for row in data:
self.assert_data_integrity(row, "fac")
try:
dt = datetime.datetime.strptime(
- row[fld], "%Y-%m-%dT%H:%M:%SZ").date()
+ row[fld], "%Y-%m-%dT%H:%M:%SZ"
+ ).date()
except ValueError:
dt = datetime.datetime.strptime(
- row[fld], "%Y-%m-%dT%H:%M:%S.%fZ").date()
+ row[fld], "%Y-%m-%dT%H:%M:%S.%fZ"
+ ).date()
fnc = getattr(self, "assert%s" % ass)
- fnc(dt, DATE[0],
- msg="%s__%s: %s, %s" % (fld, flt, row[fld], DATE[1]))
+ fnc(
+ dt,
+ DATE[0],
+ msg="%s__%s: %s, %s" % (fld, flt, row[fld], DATE[1]),
+ )
##########################################################################
@@ -2024,32 +2099,35 @@ def test_guest_005_list_filter_ix_asn_overlap(self):
# create ixlan at each exchange
ixlans = [
- IXLan.objects.create(status="ok",
- **self.make_data_ixlan(ix_id=ix.id))
+ IXLan.objects.create(status="ok", **self.make_data_ixlan(ix_id=ix.id))
for ix in exchanges
]
# all three networks peer at first exchange
for net in networks:
- NetworkIXLan.objects.create(network=net, ixlan=ixlans[0],
- status="ok", asn=net.asn, speed=0)
+ NetworkIXLan.objects.create(
+ network=net, ixlan=ixlans[0], status="ok", asn=net.asn, speed=0
+ )
# only the first two networks peer at second exchange
for net in networks[:2]:
- NetworkIXLan.objects.create(network=net, ixlan=ixlans[1],
- status="ok", asn=net.asn, speed=0)
+ NetworkIXLan.objects.create(
+ network=net, ixlan=ixlans[1], status="ok", asn=net.asn, speed=0
+ )
# do test queries
# query #1 - test overlapping exchanges for all 3 asns - should return first ix
- data = self.db_guest.all("ix", asn_overlap=",".join(
- [str(net.asn) for net in networks]))
+ data = self.db_guest.all(
+ "ix", asn_overlap=",".join([str(net.asn) for net in networks])
+ )
self.assertEqual(len(data), 1)
self.assertEqual(data[0]["id"], exchanges[0].id)
# query #2 - test overlapping exchanges for first 2 asns - should return both ixs
- data = self.db_guest.all("ix", asn_overlap=",".join(
- [str(net.asn) for net in networks[:2]]))
+ data = self.db_guest.all(
+ "ix", asn_overlap=",".join([str(net.asn) for net in networks[:2]])
+ )
self.assertEqual(len(data), 2)
for row in data:
self.assertIn(row["id"], [ix.id for ix in exchanges])
@@ -2060,8 +2138,9 @@ def test_guest_005_list_filter_ix_asn_overlap(self):
# query #4 - should error when passing too many asns
with self.assertRaises(InvalidRequestException):
- self.db_guest.all("ix", asn_overlap=",".join(
- [str(i) for i in range(0, 30)]))
+ self.db_guest.all(
+ "ix", asn_overlap=",".join([str(i) for i in range(0, 30)])
+ )
# clean up data
for net in networks:
@@ -2139,8 +2218,6 @@ def test_guest_005_list_filter_ix_net_count(self):
self.assert_data_integrity(row, "ix")
self.assertGreaterEqual(row["net_count"], 1)
-
-
##########################################################################
def test_guest_005_list_filter_fac_asn_overlap(self):
@@ -2158,25 +2235,29 @@ def test_guest_005_list_filter_fac_asn_overlap(self):
# all three networks peer at first facility
for net in networks:
- NetworkFacility.objects.create(network=net, facility=facilities[0],
- status="ok")
+ NetworkFacility.objects.create(
+ network=net, facility=facilities[0], status="ok"
+ )
# only the first two networks peer at second facility
for net in networks[:2]:
- NetworkFacility.objects.create(network=net, facility=facilities[1],
- status="ok")
+ NetworkFacility.objects.create(
+ network=net, facility=facilities[1], status="ok"
+ )
# do test queries
# query #1 - test overlapping facilities for all 3 asns - should return first facility
- data = self.db_guest.all("fac", asn_overlap=",".join(
- [str(net.asn) for net in networks]))
+ data = self.db_guest.all(
+ "fac", asn_overlap=",".join([str(net.asn) for net in networks])
+ )
self.assertEqual(len(data), 1)
self.assertEqual(data[0]["id"], facilities[0].id)
# query #2 - test overlapping facilities for first 2 asns - should return both facs
- data = self.db_guest.all("fac", asn_overlap=",".join(
- [str(net.asn) for net in networks[:2]]))
+ data = self.db_guest.all(
+ "fac", asn_overlap=",".join([str(net.asn) for net in networks[:2]])
+ )
self.assertEqual(len(data), 2)
for row in data:
self.assertIn(row["id"], [ix.id for ix in facilities])
@@ -2187,8 +2268,9 @@ def test_guest_005_list_filter_fac_asn_overlap(self):
# query #4 - should error when passing too many asns
with self.assertRaises(InvalidRequestException):
- self.db_guest.all("fac", asn_overlap=",".join(
- [str(i) for i in range(0, 30)]))
+ self.db_guest.all(
+ "fac", asn_overlap=",".join([str(i) for i in range(0, 30)])
+ )
# clean up data
for net in networks:
@@ -2283,26 +2365,27 @@ def test_guest_005_list_filter_accented(self):
test filtering with accented search terms
"""
- #TODO: sqlite3 is being used as the testing backend, and django 1.11
- #seems to be unable to set a collation on it, so we can't properly test
- #the other way atm, for now this test at least confirms that the term is
- #unaccented correctly.
+ # TODO: sqlite3 is being used as the testing backend, and django 1.11
+ # seems to be unable to set a collation on it, so we can't properly test
+ # the other way atm, for now this test at least confirms that the term is
+ # unaccented correctly.
#
- #on production we run mysql with flattened accents so both ways should work
- #there regardless.
+ # on production we run mysql with flattened accents so both ways should work
+ # there regardless.
org = Organization.objects.create(name="org unaccented", status="ok")
- net = Network.objects.create(asn=12345, name=u"net unaccented",
- status="ok", org=org)
- ix = InternetExchange.objects.create(org=org, name=u"ix unaccented", status="ok")
+ net = Network.objects.create(
+ asn=12345, name=u"net unaccented", status="ok", org=org
+ )
+ ix = InternetExchange.objects.create(
+ org=org, name=u"ix unaccented", status="ok"
+ )
fac = Facility.objects.create(org=org, name=u"fac unaccented", status="ok")
- for tag in ["org","net","ix","fac"]:
+ for tag in ["org", "net", "ix", "fac"]:
data = self.db_guest.all(tag, name=u"{} unãccented".format(tag))
self.assertEqual(len(data), 1)
-
-
##########################################################################
# READONLY PERMISSION TESTS
# These tests assert that the readonly users cannot write anything
@@ -2312,222 +2395,318 @@ def test_guest_005_list_filter_accented(self):
def test_readonly_users_003_PUT_org(self):
for db in self.readonly_dbs():
- self.assert_update(db, "org", SHARED["org_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "org",
+ SHARED["org_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_002_POST_ix(self):
for db in self.readonly_dbs():
- self.assert_create(db, "ix",
- self.make_data_ix(prefix=self.get_prefix4()),
- test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ db,
+ "ix",
+ self.make_data_ix(prefix=self.get_prefix4()),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_ix(self):
for db in self.readonly_dbs():
- self.assert_update(db, "ix", SHARED["ix_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "ix",
+ SHARED["ix_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_ix(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "ix", test_success=False,
- test_failure=SHARED["ix_r_ok"].id)
+ self.assert_delete(
+ db, "ix", test_success=False, test_failure=SHARED["ix_r_ok"].id
+ )
##########################################################################
def test_readonly_users_002_POST_fac(self):
for db in self.readonly_dbs():
- self.assert_create(db, "fac", self.make_data_fac(),
- test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ db,
+ "fac",
+ self.make_data_fac(),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_fac(self):
for db in self.readonly_dbs():
- self.assert_update(db, "fac", SHARED["fac_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "fac",
+ SHARED["fac_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_fac(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "fac", test_success=False,
- test_failure=SHARED["fac_r_ok"].id)
+ self.assert_delete(
+ db, "fac", test_success=False, test_failure=SHARED["fac_r_ok"].id
+ )
##########################################################################
def test_readonly_users_002_POST_netfac(self):
for db in self.readonly_dbs():
self.assert_create(
- db, "netfac", {
+ db,
+ "netfac",
+ {
"net_id": SHARED["net_r_ok"].id,
"fac_id": SHARED["fac_r2_ok"].id,
- "local_asn": 12345
- }, test_failures={"perms": {}}, test_success=False)
+ "local_asn": 12345,
+ },
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_netfac(self):
for db in self.readonly_dbs():
- self.assert_update(db, "netfac", SHARED["netfac_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "netfac",
+ SHARED["netfac_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_netfac(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "netfac", test_success=False,
- test_failure=SHARED["netfac_r_ok"].id)
+ self.assert_delete(
+ db, "netfac", test_success=False, test_failure=SHARED["netfac_r_ok"].id
+ )
##########################################################################
def test_readonly_users_002_POST_ixfac(self):
for db in self.readonly_dbs():
- self.assert_create(db, "ixfac", {
- "ix_id": SHARED["ix_r_ok"].id,
- "fac_id": SHARED["fac_r2_ok"].id
- }, test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ db,
+ "ixfac",
+ {"ix_id": SHARED["ix_r_ok"].id, "fac_id": SHARED["fac_r2_ok"].id},
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_ixfac(self):
for db in self.readonly_dbs():
- self.assert_update(db, "ixfac", SHARED["ixfac_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "ixfac",
+ SHARED["ixfac_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_ixfac(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "ixfac", test_success=False,
- test_failure=SHARED["ixfac_r_ok"].id)
+ self.assert_delete(
+ db, "ixfac", test_success=False, test_failure=SHARED["ixfac_r_ok"].id
+ )
##########################################################################
def test_readonly_users_002_POST_poc(self):
for db in self.readonly_dbs():
self.assert_create(
- db, "poc", self.make_data_poc(net_id=SHARED["net_rw_ok"].id),
- test_failures={"perms": {}}, test_success=False)
+ db,
+ "poc",
+ self.make_data_poc(net_id=SHARED["net_rw_ok"].id),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_poc(self):
for db in self.readonly_dbs(exclude=[self.db_user]):
- self.assert_update(db, "poc", SHARED["poc_r_ok_public"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
- self.assert_update(db, "poc", SHARED["poc_r_ok_private"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
- self.assert_update(db, "poc", SHARED["poc_r_ok_users"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "poc",
+ SHARED["poc_r_ok_public"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
+ self.assert_update(
+ db,
+ "poc",
+ SHARED["poc_r_ok_private"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
+ self.assert_update(
+ db,
+ "poc",
+ SHARED["poc_r_ok_users"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_poc(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "poc", test_success=False,
- test_failure=SHARED["poc_r_ok_public"].id)
- self.assert_delete(db, "poc", test_success=False,
- test_failure=SHARED["poc_r_ok_private"].id)
- self.assert_delete(db, "poc", test_success=False,
- test_failure=SHARED["poc_r_ok_users"].id)
+ self.assert_delete(
+ db, "poc", test_success=False, test_failure=SHARED["poc_r_ok_public"].id
+ )
+ self.assert_delete(
+ db,
+ "poc",
+ test_success=False,
+ test_failure=SHARED["poc_r_ok_private"].id,
+ )
+ self.assert_delete(
+ db, "poc", test_success=False, test_failure=SHARED["poc_r_ok_users"].id
+ )
##########################################################################
def test_readonly_users_002_POST_ixlan(self):
for db in self.readonly_dbs():
- self.assert_create(db, "ixlan", self.make_data_ixlan(),
- test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ db,
+ "ixlan",
+ self.make_data_ixlan(),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_ixlan(self):
for db in self.readonly_dbs():
- self.assert_update(db, "ixlan", SHARED["ixlan_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "ixlan",
+ SHARED["ixlan_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_ixlan(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "ixlan", test_success=False,
- test_failure=SHARED["ixlan_r_ok"].id)
+ self.assert_delete(
+ db, "ixlan", test_success=False, test_failure=SHARED["ixlan_r_ok"].id
+ )
##########################################################################
def test_readonly_users_002_POST_ixpfx(self):
for db in self.readonly_dbs():
- self.assert_create(db, "ixpfx",
- self.make_data_ixpfx(prefix="200.100.200.0/22"),
- test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ db,
+ "ixpfx",
+ self.make_data_ixpfx(prefix="200.100.200.0/22"),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_ixpfx(self):
for db in self.readonly_dbs():
- self.assert_update(db, "ixpfx", SHARED["ixpfx_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "ixpfx",
+ SHARED["ixpfx_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_ixpfx(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "ixpfx", test_success=False,
- test_failure=SHARED["ixpfx_r_ok"].id)
+ self.assert_delete(
+ db, "ixpfx", test_success=False, test_failure=SHARED["ixpfx_r_ok"].id
+ )
##########################################################################
def test_readonly_users_002_POST_netixlan(self):
for db in self.readonly_dbs():
- self.assert_create(db, "netixlan", self.make_data_netixlan(),
- test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ db,
+ "netixlan",
+ self.make_data_netixlan(),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
##########################################################################
def test_readonly_users_003_PUT_netixlan(self):
for db in self.readonly_dbs():
- self.assert_update(db, "netixlan", SHARED["netixlan_r_ok"].id, {},
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_update(
+ db,
+ "netixlan",
+ SHARED["netixlan_r_ok"].id,
+ {},
+ test_success=False,
+ test_failures={"perms": {}},
+ )
##########################################################################
def test_readonly_users_004_DELETE_netixlan(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "netixlan", test_success=False,
- test_failure=SHARED["netixlan_r_ok"].id)
+ self.assert_delete(
+ db,
+ "netixlan",
+ test_success=False,
+ test_failure=SHARED["netixlan_r_ok"].id,
+ )
##########################################################################
def test_readonly_users_004_DELETE_org(self):
for db in self.readonly_dbs():
- self.assert_delete(db, "org", test_success=False,
- test_failure=SHARED["org_r_ok"].id)
+ self.assert_delete(
+ db, "org", test_success=False, test_failure=SHARED["org_r_ok"].id
+ )
##########################################################################
# CRUD PERMISSION TESTS
@@ -2537,50 +2716,66 @@ def test_z_crud_002_create(self):
# user with create perms should be allowed to create a new poc under net_rw3_ok
# but not under net_rw2_ok
- self.assert_create(self.db_crud_create, "poc",
- self.make_data_poc(net_id=SHARED["net_rw3_ok"].id),
- test_failures={
- "perms": {
- "net_id": SHARED["net_rw2_ok"].id
- }
- })
+ self.assert_create(
+ self.db_crud_create,
+ "poc",
+ self.make_data_poc(net_id=SHARED["net_rw3_ok"].id),
+ test_failures={"perms": {"net_id": SHARED["net_rw2_ok"].id}},
+ )
# user with create perms should not be able to create an ixlan under
# net_rw_ix
- self.assert_create(self.db_crud_create, "ixlan",
- self.make_data_ixlan(ix_id=SHARED["ix_rw3_ok"].id),
- test_failures={"perms": {}}, test_success=False)
+ self.assert_create(
+ self.db_crud_create,
+ "ixlan",
+ self.make_data_ixlan(ix_id=SHARED["ix_rw3_ok"].id),
+ test_failures={"perms": {}},
+ test_success=False,
+ )
# other crud test users should not be able to create a new poc under
# net_rw3_ok
for p in ["delete", "update"]:
self.assert_create(
- getattr(self, "db_crud_%s" % p), "poc",
+ getattr(self, "db_crud_%s" % p),
+ "poc",
self.make_data_poc(net_id=SHARED["net_rw3_ok"].id),
- test_failures={"perms": {}}, test_success=False)
+ test_failures={"perms": {}},
+ test_success=False,
+ )
def test_z_crud_003_update(self):
# user with update perms should be allowed to update net_rw3_ok
# but not net_rw2_ok
- self.assert_update(self.db_crud_update, "net", SHARED["net_rw3_ok"].id,
- {"name": self.make_name("Test")}, test_failures={
- "perms": {
- "id": SHARED["net_rw2_ok"].id
- }
- })
+ self.assert_update(
+ self.db_crud_update,
+ "net",
+ SHARED["net_rw3_ok"].id,
+ {"name": self.make_name("Test")},
+ test_failures={"perms": {"id": SHARED["net_rw2_ok"].id}},
+ )
# user with update perms should not be allowed to update ix_rw3_ok
- self.assert_update(self.db_crud_update, "ix", SHARED["ix_rw3_ok"].id,
- {"name": self.make_name("Test")},
- test_failures={"perms": {}}, test_success=False)
+ self.assert_update(
+ self.db_crud_update,
+ "ix",
+ SHARED["ix_rw3_ok"].id,
+ {"name": self.make_name("Test")},
+ test_failures={"perms": {}},
+ test_success=False,
+ )
# other crud test users should not be able to update net_rw3_ok
for p in ["delete", "create"]:
self.assert_update(
- getattr(self, "db_crud_%s" % p), "net",
- SHARED["net_rw3_ok"].id, {"name": self.make_name("Test")},
- test_failures={"perms": {}}, test_success=False)
+ getattr(self, "db_crud_%s" % p),
+ "net",
+ SHARED["net_rw3_ok"].id,
+ {"name": self.make_name("Test")},
+ test_failures={"perms": {}},
+ test_success=False,
+ )
def test_z_crud_004_delete(self):
@@ -2595,8 +2790,12 @@ def test_z_crud_004_delete(self):
# user with delete perms should be allowed to update net_rw3_ok
# but not net_rw2_ok
- self.assert_delete(self.db_crud_delete, "net", SHARED["net_rw3_ok"].id,
- test_failure=SHARED["net_rw2_ok"].id)
+ self.assert_delete(
+ self.db_crud_delete,
+ "net",
+ SHARED["net_rw3_ok"].id,
+ test_failure=SHARED["net_rw2_ok"].id,
+ )
# user with delete perms should not be allowed to delete ix_rw3_ok
self.assert_delete(
@@ -2615,24 +2814,21 @@ def test_z_misc_002_dupe_netixlan_ip(self):
# test that addint duplicate netixlan ips is impossible
A = SHARED["netixlan_rw_ok"]
- self.assert_create(self.db_org_admin, "netixlan",
- self.make_data_netixlan(ixlan_id=A.ixlan_id,
- net_id=A.network_id),
- test_success=False, test_failures={
- "invalid": {
- "ipaddr4": unicode(A.ipaddr4)
- }
- })
-
- self.assert_create(self.db_org_admin, "netixlan",
- self.make_data_netixlan(
- ixlan_id=A.ixlan_id,
- net_id=A.network_id,
- ), test_success=False, test_failures={
- "invalid": {
- "ipaddr6": unicode(A.ipaddr6)
- }
- })
+ self.assert_create(
+ self.db_org_admin,
+ "netixlan",
+ self.make_data_netixlan(ixlan_id=A.ixlan_id, net_id=A.network_id),
+ test_success=False,
+ test_failures={"invalid": {"ipaddr4": unicode(A.ipaddr4)}},
+ )
+
+ self.assert_create(
+ self.db_org_admin,
+ "netixlan",
+ self.make_data_netixlan(ixlan_id=A.ixlan_id, net_id=A.network_id,),
+ test_success=False,
+ test_failures={"invalid": {"ipaddr6": unicode(A.ipaddr6)}},
+ )
def test_z_misc_002_dupe_name_update(self):
@@ -2646,11 +2842,13 @@ def test_z_misc_002_dupe_name_update(self):
self.assertEqual(A.status, "ok")
self.assertEqual(B.status, "deleted")
- self.assert_update(self.db_org_admin, "fac", A.id, {}, test_failures={
- "invalid": {
- "name": B.name
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ "fac",
+ A.id,
+ {},
+ test_failures={"invalid": {"name": B.name}},
+ )
B.refresh_from_db()
self.assertEqual(B.status, "deleted")
@@ -2661,17 +2859,20 @@ def test_z_misc_001_org_create(self):
# at this point in time
for db in self.all_dbs():
- self.assert_create(db, "org",
- self.make_data_org(name=self.make_name("Test")),
- test_success=False, test_failures={
- "perms": {}
- })
+ self.assert_create(
+ db,
+ "org",
+ self.make_data_org(name=self.make_name("Test")),
+ test_success=False,
+ test_failures={"perms": {}},
+ )
def test_z_misc_001_suggest_net(self):
# test network suggestions
data = self.make_data_net(
- asn=9000901, org_id=settings.SUGGEST_ENTITY_ORG, suggest=True)
+ asn=9000901, org_id=settings.SUGGEST_ENTITY_ORG, suggest=True
+ )
r_data = self.assert_create(self.db_user, "net", data)
@@ -2682,18 +2883,17 @@ def test_z_misc_001_suggest_net(self):
self.assertEqual(net.org_id, settings.SUGGEST_ENTITY_ORG)
data = self.make_data_net(
- asn=9000902, org_id=settings.SUGGEST_ENTITY_ORG, suggest=True)
+ asn=9000902, org_id=settings.SUGGEST_ENTITY_ORG, suggest=True
+ )
- r_data = self.assert_create(self.db_guest, "net", data,
- test_success=False, test_failures={
- "perms": {}
- })
+ r_data = self.assert_create(
+ self.db_guest, "net", data, test_success=False, test_failures={"perms": {}}
+ )
def test_z_misc_001_suggest_fac(self):
# test facility suggestions
- data = self.make_data_fac(org_id=settings.SUGGEST_ENTITY_ORG,
- suggest=True)
+ data = self.make_data_fac(org_id=settings.SUGGEST_ENTITY_ORG, suggest=True)
r_data = self.assert_create(self.db_user, "fac", data)
@@ -2703,22 +2903,22 @@ def test_z_misc_001_suggest_fac(self):
fac = Facility.objects.get(id=r_data["id"])
self.assertEqual(fac.org_id, settings.SUGGEST_ENTITY_ORG)
- data = self.make_data_fac(org_id=settings.SUGGEST_ENTITY_ORG,
- suggest=True)
+ data = self.make_data_fac(org_id=settings.SUGGEST_ENTITY_ORG, suggest=True)
- r_data = self.assert_create(self.db_guest, "fac", data,
- test_success=False, test_failures={
- "perms": {}
- })
+ r_data = self.assert_create(
+ self.db_guest, "fac", data, test_success=False, test_failures={"perms": {}}
+ )
def test_z_misc_001_suggest_ix(self):
# test exchange suggestions
- data = self.make_data_ix(org_id=settings.SUGGEST_ENTITY_ORG,
- suggest=True, prefix=self.get_prefix4())
+ data = self.make_data_ix(
+ org_id=settings.SUGGEST_ENTITY_ORG, suggest=True, prefix=self.get_prefix4()
+ )
- r_data = self.assert_create(self.db_user, "ix", data,
- ignore=["prefix", "suggest"])
+ r_data = self.assert_create(
+ self.db_user, "ix", data, ignore=["prefix", "suggest"]
+ )
self.assertEqual(r_data["org_id"], settings.SUGGEST_ENTITY_ORG)
self.assertEqual(r_data["status"], "pending")
@@ -2726,14 +2926,18 @@ def test_z_misc_001_suggest_ix(self):
ix = InternetExchange.objects.get(id=r_data["id"])
self.assertEqual(ix.org_id, settings.SUGGEST_ENTITY_ORG)
- data = self.make_data_ix(org_id=settings.SUGGEST_ENTITY_ORG,
- suggest=True, prefix=self.get_prefix4())
+ data = self.make_data_ix(
+ org_id=settings.SUGGEST_ENTITY_ORG, suggest=True, prefix=self.get_prefix4()
+ )
- r_data = self.assert_create(self.db_guest, "ix", data, ignore=[
- "prefix", "suggest"
- ], test_success=False, test_failures={
- "perms": {}
- })
+ r_data = self.assert_create(
+ self.db_guest,
+ "ix",
+ data,
+ ignore=["prefix", "suggest"],
+ test_success=False,
+ test_failures={"perms": {}},
+ )
def test_z_misc_001_suggest_outside_of_post(self):
# The `suggest` keyword should only be allowed for
@@ -2742,12 +2946,13 @@ def test_z_misc_001_suggest_outside_of_post(self):
for reftag in ["ix", "fac", "net"]:
ent = SHARED["{}_rw_ok".format(reftag)]
org_id = ent.org_id
- self.assert_update(self.db_org_admin, reftag, ent.id,
- {"notes": "bla"}, test_failures={
- "invalid": {
- "suggest": True
- }
- })
+ self.assert_update(
+ self.db_org_admin,
+ reftag,
+ ent.id,
+ {"notes": "bla"},
+ test_failures={"invalid": {"suggest": True}},
+ )
ent.refresh_from_db()
self.assertEqual(ent.org_id, org_id)
@@ -2759,9 +2964,9 @@ def test_z_misc_001_fac_address_geocode(self):
fac.geocode_status = True
fac.save()
- self.assert_update(self.db_org_admin, "fac", fac.id, {
- "address1": "This is a test"
- })
+ self.assert_update(
+ self.db_org_admin, "fac", fac.id, {"address1": "This is a test"}
+ )
fac.refresh_from_db()
self.assertEqual(fac.geocode_status, False)
@@ -2773,10 +2978,12 @@ def test_z_misc_001_fac_address_geocode(self):
# test that facility does NOT get marked for geocode sync after non relevant
# fields are changed
- self.assert_update(self.db_org_admin, "fac", fac.id, {
- "website": "http://example.com",
- "name": fac.name + " Geocode Test"
- })
+ self.assert_update(
+ self.db_org_admin,
+ "fac",
+ fac.id,
+ {"website": "http://example.com", "name": fac.name + " Geocode Test"},
+ )
fac.refresh_from_db()
self.assertEqual(fac.geocode_status, True)
@@ -2786,17 +2993,21 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--only", help="only run this test", dest="only")
- parser.add_argument("--setup",
- help="runs api test setup (user, org create) only",
- dest="setup", action="store_true")
+ parser.add_argument(
+ "--setup",
+ help="runs api test setup (user, org create) only",
+ dest="setup",
+ action="store_true",
+ )
@classmethod
def log(cls, msg):
print msg
@classmethod
- def create_entity(cls, model, prefix="rw", unset=[], key_suffix=None,
- name_suffix=None, **kwargs):
+ def create_entity(
+ cls, model, prefix="rw", unset=[], key_suffix=None, name_suffix=None, **kwargs
+ ):
tag = model.handleref.tag
status = kwargs.get("status", "ok")
name = "API Test:%s:%s:%s" % (tag.upper(), prefix.upper(), status)
@@ -2817,7 +3028,8 @@ def create_entity(cls, model, prefix="rw", unset=[], key_suffix=None,
obj = model.objects.get(**data)
cls.log(
"%s with status '%s' for %s testing already exists, skipping!"
- % (tag.upper(), status, prefix.upper()))
+ % (tag.upper(), status, prefix.upper())
+ )
except model.DoesNotExist:
fn = getattr(TestJSON, "make_data_%s" % tag, None)
if fn:
@@ -2826,8 +3038,10 @@ def create_entity(cls, model, prefix="rw", unset=[], key_suffix=None,
if k in data:
del data[k]
obj = model.objects.create(**data)
- cls.log("%s with status '%s' for %s testing created! (%s)" %
- (tag.upper(), status, prefix.upper(), obj.updated))
+ cls.log(
+ "%s with status '%s' for %s testing created! (%s)"
+ % (tag.upper(), status, prefix.upper(), obj.updated)
+ )
id = "%s_%s_%s" % (tag, prefix, status)
if key_suffix:
@@ -2909,8 +3123,7 @@ def prepare(cls, *args, **options):
"ORG for WRITE testing (with status pending) already exists, skipping!"
)
except Organization.DoesNotExist:
- org_rwp = Organization.objects.create(status="pending",
- name=ORG_RW_PENDING)
+ org_rwp = Organization.objects.create(status="pending", name=ORG_RW_PENDING)
cls.log("ORG for WRITE testing (with status pending) created!")
org_rwp.admin_usergroup.user_set.add(user_org_admin)
@@ -2935,11 +3148,11 @@ def prepare(cls, *args, **options):
try:
net_rd = Network.objects.get(name=NET_R_DELETED, org_id=org_r.id)
- cls.log(
- "NET for status 'deleted' testing already exists, skipping!")
+ cls.log("NET for status 'deleted' testing already exists, skipping!")
except Network.DoesNotExist:
- net_rd = Network.objects.create(**TestJSON.make_data_net(
- name=NET_R_DELETED, org_id=org_r.id))
+ net_rd = Network.objects.create(
+ **TestJSON.make_data_net(name=NET_R_DELETED, org_id=org_r.id)
+ )
cls.log("NET for status 'deleted' testing created!")
net_rd.delete()
@@ -2950,31 +3163,51 @@ def prepare(cls, *args, **options):
for model in [Network, Facility, InternetExchange]:
for status in ["ok", "pending"]:
for prefix in ["r", "rw"]:
- cls.create_entity(model, status=status, prefix=prefix,
- org_id=SHARED["org_%s_%s" % (prefix,
- status)].id)
cls.create_entity(
- model, status=status, prefix="%s2" % prefix,
- org_id=SHARED["org_%s_%s" % (prefix, status)].id)
+ model,
+ status=status,
+ prefix=prefix,
+ org_id=SHARED["org_%s_%s" % (prefix, status)].id,
+ )
cls.create_entity(
- model, status=status, prefix="%s3" % prefix,
- org_id=SHARED["org_%s_%s" % (prefix, status)].id)
+ model,
+ status=status,
+ prefix="%s2" % prefix,
+ org_id=SHARED["org_%s_%s" % (prefix, status)].id,
+ )
+ cls.create_entity(
+ model,
+ status=status,
+ prefix="%s3" % prefix,
+ org_id=SHARED["org_%s_%s" % (prefix, status)].id,
+ )
# create entities for duplicate validation testing
for model in [Network, Facility, InternetExchange]:
- cls.create_entity(model, status="deleted", prefix="rw_dupe",
- name_suffix=" DUPE",
- org_id=SHARED["org_rw_ok"].id)
- cls.create_entity(model, status="ok", prefix="rw_dupe",
- name_suffix=" DUPE !",
- org_id=SHARED["org_rw_ok"].id)
+ cls.create_entity(
+ model,
+ status="deleted",
+ prefix="rw_dupe",
+ name_suffix=" DUPE",
+ org_id=SHARED["org_rw_ok"].id,
+ )
+ cls.create_entity(
+ model,
+ status="ok",
+ prefix="rw_dupe",
+ name_suffix=" DUPE !",
+ org_id=SHARED["org_rw_ok"].id,
+ )
for status in ["ok", "pending"]:
for prefix in ["r", "rw"]:
- cls.create_entity(IXLan, status=status, prefix=prefix,
- ix_id=SHARED["ix_%s_%s" % (prefix,
- status)].id)
+ cls.create_entity(
+ IXLan,
+ status=status,
+ prefix=prefix,
+ ix_id=SHARED["ix_%s_%s" % (prefix, status)].id,
+ )
cls.create_entity(
IXLanPrefix,
status=status,
@@ -2990,49 +3223,68 @@ def prepare(cls, *args, **options):
ixlan_id=SHARED["ixlan_%s_%s" % (prefix, status)].id,
)
cls.create_entity(
- InternetExchangeFacility, status=status, prefix=prefix,
+ InternetExchangeFacility,
+ status=status,
+ prefix=prefix,
facility_id=SHARED["fac_%s_%s" % (prefix, status)].id,
- ix_id=SHARED["ix_%s_%s" % (prefix, status)].id)
+ ix_id=SHARED["ix_%s_%s" % (prefix, status)].id,
+ )
cls.create_entity(
- NetworkFacility, status=status, prefix=prefix, unset=[
- "net_id"
- ], facility_id=SHARED["fac_%s_%s" % (prefix, status)].id,
- network_id=SHARED["net_%s_%s" % (prefix, status)].id)
+ NetworkFacility,
+ status=status,
+ prefix=prefix,
+ unset=["net_id"],
+ facility_id=SHARED["fac_%s_%s" % (prefix, status)].id,
+ network_id=SHARED["net_%s_%s" % (prefix, status)].id,
+ )
cls.create_entity(
- NetworkIXLan, status=status, prefix=prefix, unset=[
- "net_id"
- ], ixlan_id=SHARED["ixlan_%s_%s" % (prefix, status)].id,
- network_id=SHARED["net_%s_%s" % (prefix, status)].id)
+ NetworkIXLan,
+ status=status,
+ prefix=prefix,
+ unset=["net_id"],
+ ixlan_id=SHARED["ixlan_%s_%s" % (prefix, status)].id,
+ network_id=SHARED["net_%s_%s" % (prefix, status)].id,
+ )
for v in ["Private", "Users", "Public"]:
- cls.create_entity(NetworkContact, status=status,
- prefix=prefix, visible=v,
- network_id=SHARED["net_%s_%s" %
- (prefix, status)].id,
- unset=["net_id"], key_suffix=v.lower())
+ cls.create_entity(
+ NetworkContact,
+ status=status,
+ prefix=prefix,
+ visible=v,
+ network_id=SHARED["net_%s_%s" % (prefix, status)].id,
+ unset=["net_id"],
+ key_suffix=v.lower(),
+ )
# set up permissions for crud permission tests
crud_users["delete"].userpermission_set.create(
namespace=SHARED["net_rw3_ok"].nsp_namespace,
- permissions=PERM_READ | PERM_DELETE)
+ permissions=PERM_READ | PERM_DELETE,
+ )
crud_users["create"].userpermission_set.create(
namespace=SHARED["net_rw3_ok"].nsp_namespace,
- permissions=PERM_READ | PERM_CREATE)
+ permissions=PERM_READ | PERM_CREATE,
+ )
crud_users["update"].userpermission_set.create(
namespace=SHARED["net_rw3_ok"].nsp_namespace,
- permissions=PERM_READ | PERM_UPDATE)
+ permissions=PERM_READ | PERM_UPDATE,
+ )
# undelete in case they got flagged as deleted
for name, obj in SHARED.items():
- if hasattr(
- obj, "status"
- ) and obj.status == "deleted" and obj != net_rd and getattr(
- obj, "name", "").find("DUPE") == -1:
+ if (
+ hasattr(obj, "status")
+ and obj.status == "deleted"
+ and obj != net_rd
+ and getattr(obj, "name", "").find("DUPE") == -1
+ ):
obj.status = "ok"
obj.save()
- Organization.objects.create(name="Suggested Entitites", status="ok",
- id=settings.SUGGEST_ENTITY_ORG)
+ Organization.objects.create(
+ name="Suggested Entitites", status="ok", id=settings.SUGGEST_ENTITY_ORG
+ )
cls.log("Setup for API testing completed!")
@@ -3072,9 +3324,9 @@ def handle(self, *args, **options):
self.cleanup()
print "Cleaned up after inegrity error, please try again .."
return
- if options['setup']:
+ if options["setup"]:
return
- if not options['only']:
+ if not options["only"]:
suite = unittest.TestLoader().loadTestsFromTestCase(TestJSON)
else:
only = options["only"].split(",")
@@ -3084,7 +3336,8 @@ def handle(self, *args, **options):
if key[:5] == "test_" and key.find(o) > -1:
funcs.append(
"peeringdb_server.management.commands.pdb_api_test.TestJSON.%s"
- % key)
+ % key
+ )
funcs = sorted(funcs)
diff --git a/peeringdb_server/management/commands/pdb_batch_replace.py b/peeringdb_server/management/commands/pdb_batch_replace.py
index cc1ee5b0..1a05cf29 100644
--- a/peeringdb_server/management/commands/pdb_batch_replace.py
+++ b/peeringdb_server/management/commands/pdb_batch_replace.py
@@ -15,13 +15,13 @@ class Command(BaseCommand):
valid_targets = {"fac": ["name", "org_id"]}
def add_arguments(self, parser):
- parser.add_argument('--commit', action='store_true',
- help="will commit the fac merge")
- parser.add_argument('--search',
- help=".:")
parser.add_argument(
- '--replace',
- help="::")
+ "--commit", action="store_true", help="will commit the fac merge"
+ )
+ parser.add_argument("--search", help=".:")
+ parser.add_argument(
+ "--replace", help="::"
+ )
def log(self, msg):
if not self.commit:
@@ -37,19 +37,20 @@ def handle(self, *args, **options):
self.replace = options.get("replace")
if not self.search:
- raise CommandError(
- "Specify search parameters using the --search option")
+ raise CommandError("Specify search parameters using the --search option")
if not self.replace:
raise CommandError(
- "Specify replacement parameters using the --replace option")
+ "Specify replacement parameters using the --replace option"
+ )
try:
search_field, search_value = self.search.split(":")
ref_tag, search_field = search_field.split(".")
except:
raise CommandError(
- "Format for --search: .:")
+ "Format for --search: .:"
+ )
try:
m = re.match("^([^:]+):([^:]+):(.+)$", self.replace)
@@ -61,13 +62,15 @@ def handle(self, *args, **options):
"Format for --replace: ::"
)
- #if replace_field not in self.valid_targets.get(ref_tag,[]):
+ # if replace_field not in self.valid_targets.get(ref_tag,[]):
# raise CommandError("%s.%s is not a valid target for this script at this point, please add it to the valid_targets map" % (ref_tag, replace_field))
self.target = "%s.%s" % (ref_tag, search_field)
- self.log("Searching for %s where %s matches '%s' ..." %
- (ref_tag, search_field, search_value))
+ self.log(
+ "Searching for %s where %s matches '%s' ..."
+ % (ref_tag, search_field, search_value)
+ )
q = pdbm.REFTAG_MAP[ref_tag].objects.filter(status="ok")
c = 0
@@ -92,8 +95,10 @@ def handle(self, *args, **options):
r_val = replace_value
if r_val is None:
continue
- self.log("(<%s> id:%s) Changing %s from '%s' to '%s'" %
- (e, e.id, replace_field, t_val, r_val))
+ self.log(
+ "(<%s> id:%s) Changing %s from '%s' to '%s'"
+ % (e, e.id, replace_field, t_val, r_val)
+ )
c += 1
if self.commit:
setattr(e, replace_field, r_val)
diff --git a/peeringdb_server/management/commands/pdb_deskpro_publish.py b/peeringdb_server/management/commands/pdb_deskpro_publish.py
index 9ca654fd..2ce1a712 100644
--- a/peeringdb_server/management/commands/pdb_deskpro_publish.py
+++ b/peeringdb_server/management/commands/pdb_deskpro_publish.py
@@ -22,7 +22,8 @@ def handle(self, *args, **options):
client = APIClient(settings.DESKPRO_URL, settings.DESKPRO_KEY)
self.log(u"DESKPRO: {}".format(settings.DESKPRO_URL))
ticket_qs = models.DeskProTicket.objects.filter(
- published__isnull=True).order_by("created")
+ published__isnull=True
+ ).order_by("created")
if not ticket_qs.count():
self.log("No tickets in queue")
@@ -33,16 +34,17 @@ def handle(self, *args, **options):
try:
client.create_ticket(ticket)
- ticket.published = datetime.datetime.now().replace(
- tzinfo=models.UTC())
+ ticket.published = datetime.datetime.now().replace(tzinfo=models.UTC())
ticket.save()
except APIError as exc:
self.log(
- u"!!!! Could not create ticket #{} - error data has been attached to ticket body.".
- format(ticket.id))
- ticket.published = datetime.datetime.now().replace(
- tzinfo=models.UTC())
+ u"!!!! Could not create ticket #{} - error data has been attached to ticket body.".format(
+ ticket.id
+ )
+ )
+ ticket.published = datetime.datetime.now().replace(tzinfo=models.UTC())
ticket.subject = u"[FAILED] {}".format(ticket.subject)
ticket.body = u"{}\nAPI Delivery Error: {}".format(
- ticket.body, exc.data)
+ ticket.body, exc.data
+ )
ticket.save()
diff --git a/peeringdb_server/management/commands/pdb_deskpro_requeue.py b/peeringdb_server/management/commands/pdb_deskpro_requeue.py
index 528fe30b..678890fa 100644
--- a/peeringdb_server/management/commands/pdb_deskpro_requeue.py
+++ b/peeringdb_server/management/commands/pdb_deskpro_requeue.py
@@ -10,10 +10,12 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("id", nargs="?", help="ticket id")
- parser.add_argument('--commit', action='store_true',
- help="will commit the changes")
- parser.add_argument('--only-failed', action='store_true',
- help="only requeue failed tickets")
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit the changes"
+ )
+ parser.add_argument(
+ "--only-failed", action="store_true", help="only requeue failed tickets"
+ )
def log(self, msg):
if self.commit:
@@ -28,12 +30,10 @@ def handle(self, *args, **options):
qset = DeskProTicket.objects
if _id[0] == "g":
- self.log("Requeuing tickets with id greater than {}".format(
- _id[1:]))
+ self.log("Requeuing tickets with id greater than {}".format(_id[1:]))
qset = qset.filter(pk__gt=_id[1:])
elif _id[0] == "l":
- self.log("Requeuing tickets with id smaller than {}".format(
- _id[1:]))
+ self.log("Requeuing tickets with id smaller than {}".format(_id[1:]))
qset = qset.filter(pk__lt=_id[1:])
else:
qset = qset.filter(pk=_id)
@@ -43,7 +43,7 @@ def handle(self, *args, **options):
continue
self.log("Requeuing ticket with id {}".format(ticket.id))
ticket.subject = ticket.subject.replace("[FAILED]", "")
- ticket.body = re.sub(r'API Delivery Error(.+)$', '', ticket.body)
+ ticket.body = re.sub(r"API Delivery Error(.+)$", "", ticket.body)
ticket.published = None
if self.commit:
ticket.save()
diff --git a/peeringdb_server/management/commands/pdb_fac_merge.py b/peeringdb_server/management/commands/pdb_fac_merge.py
index f5148510..83b973bd 100644
--- a/peeringdb_server/management/commands/pdb_fac_merge.py
+++ b/peeringdb_server/management/commands/pdb_fac_merge.py
@@ -7,7 +7,7 @@
def soft_delete(fac, cmd):
- #overriding
+ # overriding
for k in fac._handleref.delete_cascade:
q = getattr(fac, k).exclude(status="deleted")
@@ -26,15 +26,14 @@ class Command(BaseCommand):
pretend = False
def add_arguments(self, parser):
- parser.add_argument('--commit', action='store_true',
- help="will commit the fac merge")
- parser.add_argument('--target',
- help="target of the merge (facility id)")
- parser.add_argument('--ids',
- help="comma separated list of facility ids")
parser.add_argument(
- '--match',
- help="all facs with matching names will be merged (regex)")
+ "--commit", action="store_true", help="will commit the fac merge"
+ )
+ parser.add_argument("--target", help="target of the merge (facility id)")
+ parser.add_argument("--ids", help="comma separated list of facility ids")
+ parser.add_argument(
+ "--match", help="all facs with matching names will be merged (regex)"
+ )
def log(self, msg):
if not self.commit:
@@ -61,7 +60,7 @@ def handle(self, *args, **options):
if self.match:
if self.ids:
- msg = 'ids and match are mutually exclusive'
+ msg = "ids and match are mutually exclusive"
self.log(msg)
raise CommandError(msg)
@@ -72,13 +71,12 @@ def handle(self, *args, **options):
elif self.ids:
self.ids = self.ids.split(",")
- self.log("Merging facilities %s -> %s" % (", ".join(self.ids),
- self.target))
+ self.log("Merging facilities %s -> %s" % (", ".join(self.ids), self.target))
for fac in pdbm.Facility.objects.filter(id__in=self.ids):
facs.append(fac)
else:
- msg = 'IDs or match is required'
+ msg = "IDs or match is required"
self.log(msg)
raise CommandError(msg)
@@ -93,15 +91,18 @@ def handle(self, *args, **options):
if fac.id == self.target.id:
continue
self.log("Merging %s (%d) .." % (fac, fac.id))
- for netfac in pdbm.NetworkFacility.objects.filter(
- facility=fac).exclude(status="deleted"):
+ for netfac in pdbm.NetworkFacility.objects.filter(facility=fac).exclude(
+ status="deleted"
+ ):
netfac_other = pdbm.NetworkFacility.objects.filter(
- facility=self.target, network_id=netfac.network_id)
+ facility=self.target, network_id=netfac.network_id
+ )
# we check if the target fac already has a netfac to the same network (that is currently undeleted), if it does we skip it
if netfac_other.exclude(status="deleted").exists():
self.log(
" - netfac %s : connection already exists at target, skipping."
- % netfac)
+ % netfac
+ )
continue
# if it exists but is currently delete, we simply undelete it
elif netfac_other.exists():
@@ -111,8 +112,7 @@ def handle(self, *args, **options):
netfac_other.avail_ethernet = netfac.avail_ethernet
netfac_other.avail_atm = netfac.avail_atm
netfac_other.status = "ok"
- self.log(" - netfac %s (undeleting and updating)" %
- netfac_other)
+ self.log(" - netfac %s (undeleting and updating)" % netfac_other)
moved.append(netfac_other)
if self.commit:
netfac_other.save()
@@ -125,22 +125,24 @@ def handle(self, *args, **options):
netfac.save()
for ixfac in pdbm.InternetExchangeFacility.objects.filter(
- facility=fac).exclude(status="deleted"):
+ facility=fac
+ ).exclude(status="deleted"):
ixfac_other = pdbm.InternetExchangeFacility.objects.filter(
- facility=self.target, ix=ixfac.ix)
+ facility=self.target, ix=ixfac.ix
+ )
# we check if the target fac already has an ixfac to the same exchange (that is currently undeleted), if it does, we skip it
if ixfac_other.exclude(status="deleted").exists():
self.log(
" - ixfac %s : connection already exists at target, skipping."
- % ixfac)
+ % ixfac
+ )
continue
# if it exists but is currently deleted, we undelete and copy
elif ixfac_other.exists():
ixfac_other = ixfac_other.first()
ixfac_other.status = "ok"
moved.append(ixfac_other)
- self.log(
- " - ixfac %s (undeleting and updating)" % ixfac_other)
+ self.log(" - ixfac %s (undeleting and updating)" % ixfac_other)
if self.commit:
ixfac_other.save()
# if it doesnt exist, we update the facility to the target facility and save
@@ -155,5 +157,7 @@ def handle(self, *args, **options):
if self.commit:
mail_users_entity_merge(
fac.org.admin_usergroup.user_set.all(),
- self.target.org.admin_usergroup.user_set.all(), fac,
- self.target)
+ self.target.org.admin_usergroup.user_set.all(),
+ fac,
+ self.target,
+ )
diff --git a/peeringdb_server/management/commands/pdb_fac_merge_undo.py b/peeringdb_server/management/commands/pdb_fac_merge_undo.py
index d6970d1f..9cb45ddb 100644
--- a/peeringdb_server/management/commands/pdb_fac_merge_undo.py
+++ b/peeringdb_server/management/commands/pdb_fac_merge_undo.py
@@ -2,20 +2,27 @@
import reversion
from django.core.management.base import BaseCommand
-from peeringdb_server.models import (CommandLineTool, Facility,
- NetworkFacility, InternetExchangeFacility)
+from peeringdb_server.models import (
+ CommandLineTool,
+ Facility,
+ NetworkFacility,
+ InternetExchangeFacility,
+)
class Command(BaseCommand):
- help = "Undo a facility merge from merge log (either --log or --clt needs to provided)"
+ help = (
+ "Undo a facility merge from merge log (either --log or --clt needs to provided)"
+ )
def add_arguments(self, parser):
- parser.add_argument('--commit', action='store_true',
- help="will commit the fac merge")
- parser.add_argument('--log', help="merge log file")
parser.add_argument(
- '--clt', help=
- "command line tool instance - this allows you to undo if the command was run from the admin UI"
+ "--commit", action="store_true", help="will commit the fac merge"
+ )
+ parser.add_argument("--log", help="merge log file")
+ parser.add_argument(
+ "--clt",
+ help="command line tool instance - this allows you to undo if the command was run from the admin UI",
)
def log(self, msg):
@@ -34,8 +41,7 @@ def handle(self, *args, **options):
with open(self.log_file, "r") as fh:
log = fh.readlines()
elif self.clt_id:
- clt = CommandLineTool.objects.get(id=self.clt_id,
- tool="pdb_fac_merge")
+ clt = CommandLineTool.objects.get(id=self.clt_id, tool="pdb_fac_merge")
log = clt.result.split("\n")
else:
self.log("[error] no suitable log provided")
@@ -53,16 +59,23 @@ def handle(self, *args, **options):
for line in log:
if re.match(regex_facilities, line):
match = re.match(regex_facilities, line)
- sources = dict([(fac.id, fac)
- for fac in Facility.objects.filter(
- id__in=match.group(1).split(", "))])
+ sources = dict(
+ [
+ (fac.id, fac)
+ for fac in Facility.objects.filter(
+ id__in=match.group(1).split(", ")
+ )
+ ]
+ )
target = Facility.objects.get(id=match.group(2))
for source in sources.values():
if source.org.status != "ok":
self.log(
- "[error] Parent organization {} of facility {} currently has status `{}`, as such the facility cannot be undeleted, please fix the organization and run the script again".
- format(source.org, source, source.org.status))
+ "[error] Parent organization {} of facility {} currently has status `{}`, as such the facility cannot be undeleted, please fix the organization and run the script again".format(
+ source.org, source, source.org.status
+ )
+ )
return
for source in sources.values():
@@ -71,8 +84,7 @@ def handle(self, *args, **options):
"[warning] Looks like this merge has already been undone one way or another, please double check before committing this command"
)
source.status = "ok"
- self.log("Undeleting facility {} (#{})".format(
- source, source.id))
+ self.log("Undeleting facility {} (#{})".format(source, source.id))
if self.commit:
source.save()
@@ -89,8 +101,7 @@ def handle(self, *args, **options):
netfac = NetworkFacility.objects.get(id=match.group(1))
netfac.status = "ok"
netfac.facility = source
- self.log("Undoing network facility merge (#{})".format(
- netfac.id))
+ self.log("Undoing network facility merge (#{})".format(netfac.id))
if self.commit:
netfac.save()
@@ -98,8 +109,7 @@ def handle(self, *args, **options):
match = re.match(regex_delete_netfac, line)
netfac = NetworkFacility.objects.get(id=match.group(1))
netfac.status = "ok"
- self.log("Undoing network facility deletion (#{})".format(
- netfac.id))
+ self.log("Undoing network facility deletion (#{})".format(netfac.id))
if self.commit:
netfac.save()
diff --git a/peeringdb_server/management/commands/pdb_fix_status_history.py b/peeringdb_server/management/commands/pdb_fix_status_history.py
index c3a77346..59dce4b0 100644
--- a/peeringdb_server/management/commands/pdb_fix_status_history.py
+++ b/peeringdb_server/management/commands/pdb_fix_status_history.py
@@ -15,8 +15,10 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- '--commit', action='store_true',
- help="commit changes, otherwise run in pretend mode")
+ "--commit",
+ action="store_true",
+ help="commit changes, otherwise run in pretend mode",
+ )
def log(self, msg):
if not self.commit:
@@ -67,8 +69,11 @@ def process(self, tag):
if archived_status != entity.status:
fixed += 1
- self.log("Fixing {}-{} {} archived status: {}".format(
- tag, entity.id, version.id, entity.status))
+ self.log(
+ "Fixing {}-{} {} archived status: {}".format(
+ tag, entity.id, version.id, entity.status
+ )
+ )
if self.commit:
self.process_entity(entity, version)
@@ -83,8 +88,7 @@ def process_entity(self, entity, most_recent_version):
# note in comment why this revision was created
- reversion.set_comment(
- "Fixing status in object archives (script, #558)")
+ reversion.set_comment("Fixing status in object archives (script, #558)")
# add entity to revision
diff --git a/peeringdb_server/management/commands/pdb_generate_test_data.py b/peeringdb_server/management/commands/pdb_generate_test_data.py
index cbae5daf..78d17cea 100644
--- a/peeringdb_server/management/commands/pdb_generate_test_data.py
+++ b/peeringdb_server/management/commands/pdb_generate_test_data.py
@@ -15,10 +15,11 @@ class Command(BaseCommand):
help = "Will create test data. This will wipe all data locally, so use with caution. This command is NOT to be run on production or beta environments."
def add_arguments(self, parser):
- parser.add_argument('--commit', action='store_true',
- help="will commit the changes")
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit the changes"
+ )
- parser.add_argument('--limit', type=int, default=2)
+ parser.add_argument("--limit", type=int, default=2)
def log(self, msg):
if self.commit:
@@ -31,8 +32,7 @@ def handle(self, *args, **options):
self.limit = options.get("limit")
if settings.RELEASE_ENV in ["prod", "beta"]:
- self.log(
- "This command is only allowed to run on dev / test instances")
+ self.log("This command is only allowed to run on dev / test instances")
return
self.mock = Mock()
@@ -54,15 +54,25 @@ def wipe(self):
def generate(self):
self.entities = dict([(k, []) for k in models.REFTAG_MAP.keys()])
queue = [
- "org", "net", "ix", "fac", "ixlan", "ixpfx", "ixfac", "netixlan",
- "netfac", "poc"
+ "org",
+ "net",
+ "ix",
+ "fac",
+ "ixlan",
+ "ixpfx",
+ "ixfac",
+ "netixlan",
+ "netfac",
+ "poc",
]
self.log("Wiping current data ...")
self.wipe()
self.log(
- "Making {} of each - Use the --limit option to increase or decrease (5 max)".
- format(self.limit))
+ "Making {} of each - Use the --limit option to increase or decrease (5 max)".format(
+ self.limit
+ )
+ )
if not self.commit:
return
diff --git a/peeringdb_server/management/commands/pdb_geosync.py b/peeringdb_server/management/commands/pdb_geosync.py
index a44e701a..4e3f8753 100644
--- a/peeringdb_server/management/commands/pdb_geosync.py
+++ b/peeringdb_server/management/commands/pdb_geosync.py
@@ -14,15 +14,21 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- "reftag", nargs="?", help=
- "can be reftag only such as 'fac' or reftag with id to only sync that specific entity such as 'fac.1'"
+ "reftag",
+ nargs="?",
+ help="can be reftag only such as 'fac' or reftag with id to only sync that specific entity such as 'fac.1'",
)
parser.add_argument(
- "--limit", type=int, default=0,
- help="limit how many rows are synced, useful for testing")
+ "--limit",
+ type=int,
+ default=0,
+ help="limit how many rows are synced, useful for testing",
+ )
parser.add_argument(
- '--commit', action='store_true',
- help="commit changes, otherwise run in pretend mode")
+ "--commit",
+ action="store_true",
+ help="commit changes, otherwise run in pretend mode",
+ )
def log(self, msg):
if not self.commit:
@@ -47,8 +53,7 @@ def sync(self, reftag, _id, limit=0):
if not model:
raise ValueError(u"Unknown reftag: {}".format(reftag))
if not hasattr(model, "geocode_status"):
- raise TypeError(
- "Can only geosync models containing GeocodeBaseMixin")
+ raise TypeError("Can only geosync models containing GeocodeBaseMixin")
q = model.handleref.undeleted().filter(geocode_status=False)
if _id:
q = q.filter(id=_id)
@@ -60,8 +65,11 @@ def sync(self, reftag, _id, limit=0):
if entity.geocode_status:
continue
i += 1
- self.log(u"Syncing {} [{} {}/{} ID:{}]".format(entity.name, reftag, i,
- count, entity.id))
+ self.log(
+ u"Syncing {} [{} {}/{} ID:{}]".format(
+ entity.name, reftag, i, count, entity.id
+ )
+ )
if self.commit:
entity.geocode(self.gmaps)
diff --git a/peeringdb_server/management/commands/pdb_ixf_ixp_member_import.py b/peeringdb_server/management/commands/pdb_ixf_ixp_member_import.py
index c35c588b..cc5c1757 100644
--- a/peeringdb_server/management/commands/pdb_ixf_ixp_member_import.py
+++ b/peeringdb_server/management/commands/pdb_ixf_ixp_member_import.py
@@ -8,7 +8,7 @@
IXLan,
NetworkIXLan,
Network,
- )
+)
from peeringdb_server import ixf
@@ -17,21 +17,25 @@ class Command(BaseCommand):
commit = False
def add_arguments(self, parser):
- parser.add_argument('--commit', action='store_true',
- help="will commit changes to the database")
- parser.add_argument('--asn', type=int, default=0,
- help="Only process this ASN")
- parser.add_argument('--ixlan', type=int, nargs="*",
- help="Only process these ixlans")
- parser.add_argument('--debug', action="store_true",
- help="Show debug output")
- parser.add_argument('--preview', action="store_true",
- help="Run in preview mode")
- parser.add_argument('--cache', action="store_true",
- help="Only use locally cached IX-F data")
- parser.add_argument('--skip-import', action="store_true",
- help="Just update IX-F cache, do NOT perform any import logic")
-
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit changes to the database"
+ )
+ parser.add_argument("--asn", type=int, default=0, help="Only process this ASN")
+ parser.add_argument(
+ "--ixlan", type=int, nargs="*", help="Only process these ixlans"
+ )
+ parser.add_argument("--debug", action="store_true", help="Show debug output")
+ parser.add_argument(
+ "--preview", action="store_true", help="Run in preview mode"
+ )
+ parser.add_argument(
+ "--cache", action="store_true", help="Only use locally cached IX-F data"
+ )
+ parser.add_argument(
+ "--skip-import",
+ action="store_true",
+ help="Just update IX-F cache, do NOT perform any import logic",
+ )
def log(self, msg, debug=False):
if self.preview:
@@ -70,26 +74,36 @@ def handle(self, *args, **options):
if ixlan_ids:
qset = qset.filter(id__in=ixlan_ids)
-
- total_log = {"data":[], "errors":[]}
+ total_log = {"data": [], "errors": []}
for ixlan in qset:
- self.log("Fetching data for {} from {}".format(
- ixlan, ixlan.ixf_ixp_member_list_url))
+ self.log(
+ "Fetching data for {} from {}".format(
+ ixlan, ixlan.ixf_ixp_member_list_url
+ )
+ )
try:
importer = ixf.Importer()
importer.skip_import = self.skip_import
importer.cache_only = self.cache
self.log("Updating {}".format(ixlan))
with transaction.atomic():
- success, netixlans, netixlans_deleted, log = importer.update(ixlan,
- save=self.commit,
- asn=asn)
+ success, netixlans, netixlans_deleted, log = importer.update(
+ ixlan, save=self.commit, asn=asn
+ )
self.log(json.dumps(log), debug=True)
- self.log("Done: {} updated: {} deleted: {}".format(
- success, len(netixlans), len(netixlans_deleted)))
+ self.log(
+ "Done: {} updated: {} deleted: {}".format(
+ success, len(netixlans), len(netixlans_deleted)
+ )
+ )
total_log["data"].extend(log["data"])
- total_log["errors"].extend(["{}({}): {}".format(ixlan.ix.name, ixlan.id, err) for err in log["errors"]])
+ total_log["errors"].extend(
+ [
+ "{}({}): {}".format(ixlan.ix.name, ixlan.id, err)
+ for err in log["errors"]
+ ]
+ )
except Exception as inst:
self.log("ERROR: {}".format(inst))
diff --git a/peeringdb_server/management/commands/pdb_ixp_merge.py b/peeringdb_server/management/commands/pdb_ixp_merge.py
index bfe348ea..c269fb6c 100644
--- a/peeringdb_server/management/commands/pdb_ixp_merge.py
+++ b/peeringdb_server/management/commands/pdb_ixp_merge.py
@@ -11,13 +11,12 @@ class Command(BaseCommand):
commit = False
def add_arguments(self, parser):
- parser.add_argument('--target', help="merge into this ixp")
+ parser.add_argument("--target", help="merge into this ixp")
parser.add_argument(
- '--ids', help=
- "merge these ixps (note: target ixp specified with the --target option)"
+ "--ids",
+ help="merge these ixps (note: target ixp specified with the --target option)",
)
- parser.add_argument('--commit', action='store_true',
- help="commit changes")
+ parser.add_argument("--commit", action="store_true", help="commit changes")
def log(self, msg):
if not self.commit:
@@ -37,8 +36,7 @@ def handle(self, *args, **options):
self.log("Merging %s into %s" % (ixp_from.name, ixp_to.name))
- ixlans_from = pdbm.IXLan.objects.filter(ix=ixp_from).exclude(
- status="deleted")
+ ixlans_from = pdbm.IXLan.objects.filter(ix=ixp_from).exclude(status="deleted")
for ixlan in ixlans_from:
ixlan.ix = ixp_to
self.log("Moving IXLAN %s to %s" % (ixlan.id, ixp_to.name))
@@ -49,4 +47,7 @@ def handle(self, *args, **options):
ixp_from.delete()
mail_users_entity_merge(
ixp_from.org.admin_usergroup.user_set.all(),
- ixp_to.org.admin_usergroup.user_set.all(), ixp_from, ixp_to)
+ ixp_to.org.admin_usergroup.user_set.all(),
+ ixp_from,
+ ixp_to,
+ )
diff --git a/peeringdb_server/management/commands/pdb_load_data.py b/peeringdb_server/management/commands/pdb_load_data.py
index 08fb509d..f82a3de7 100644
--- a/peeringdb_server/management/commands/pdb_load_data.py
+++ b/peeringdb_server/management/commands/pdb_load_data.py
@@ -12,6 +12,7 @@
from django_peeringdb import models as djpdb_models
from django_peeringdb import sync, settings as djpdb_settings
+
def sync_obj(cls, row):
"""
we need to override django peeringdb's sync_obj function
@@ -23,7 +24,7 @@ def sync_obj(cls, row):
return
try:
- obj = cls.objects.get(pk=row['id'])
+ obj = cls.objects.get(pk=row["id"])
except cls.DoesNotExist:
obj = cls()
@@ -44,7 +45,7 @@ def sync_obj(cls, row):
except AttributeError:
pass
- #print(obj, obj.id)
+ # print(obj, obj.id)
try:
# we want to validate because it fixes some values
@@ -81,37 +82,43 @@ class Command(BaseCommand):
help = "Load initial data from another peeringdb instance"
def add_arguments(self, parser):
- parser.add_argument("--url", default="https://www.peeringdb.com/api/",
- type=str)
-
- parser.add_argument('--commit', action='store_true',
- help="will commit the changes")
+ parser.add_argument("--url", default="https://www.peeringdb.com/api/", type=str)
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit the changes"
+ )
def handle(self, *args, **options):
if settings.RELEASE_ENV != "dev" and not settings.TUTORIAL_MODE:
- self.stdout.write("Command can only be run on dev instances and instances "\
- "with tutorial mode enabled")
+ self.stdout.write(
+ "Command can only be run on dev instances and instances "
+ "with tutorial mode enabled"
+ )
return
if not options.get("commit"):
- self.stdout.write("This will sync data from {url} to this instance, and will take "\
- "roughly 20 minutes to complete on a fresh db. "\
- "Run the command with `--commit` if you are sure you want "\
- "to do this.".format(**options))
+ self.stdout.write(
+ "This will sync data from {url} to this instance, and will take "
+ "roughly 20 minutes to complete on a fresh db. "
+ "Run the command with `--commit` if you are sure you want "
+ "to do this.".format(**options)
+ )
return
-
djpdb_settings.SYNC_URL = options.get("url")
- pre_save.disconnect(signals.addressmodel_save,
- sender=pdb_models.Facility)
+ pre_save.disconnect(signals.addressmodel_save, sender=pdb_models.Facility)
djpdb_models.all_models = [
- pdb_models.Organization, pdb_models.Facility, pdb_models.Network,
- pdb_models.InternetExchange, pdb_models.InternetExchangeFacility,
- pdb_models.IXLan, pdb_models.IXLanPrefix,
- pdb_models.NetworkContact, pdb_models.NetworkFacility,
- pdb_models.NetworkIXLan
+ pdb_models.Organization,
+ pdb_models.Facility,
+ pdb_models.Network,
+ pdb_models.InternetExchange,
+ pdb_models.InternetExchangeFacility,
+ pdb_models.IXLan,
+ pdb_models.IXLanPrefix,
+ pdb_models.NetworkContact,
+ pdb_models.NetworkFacility,
+ pdb_models.NetworkIXLan,
]
call_command("pdb_sync")
diff --git a/peeringdb_server/management/commands/pdb_maintenance.py b/peeringdb_server/management/commands/pdb_maintenance.py
index 97145d68..729df633 100644
--- a/peeringdb_server/management/commands/pdb_maintenance.py
+++ b/peeringdb_server/management/commands/pdb_maintenance.py
@@ -2,13 +2,15 @@
from django.conf import settings
from peeringdb_server import maintenance
+
class Command(BaseCommand):
help = "Put instance in or out of maintenance mode"
def add_arguments(self, parser):
- parser.add_argument("state", nargs="?", choices=["on","off"])
- parser.add_argument('--commit', action='store_true',
- help="will commit the changes")
+ parser.add_argument("state", nargs="?", choices=["on", "off"])
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit the changes"
+ )
def log(self, msg):
if self.commit:
@@ -21,14 +23,15 @@ def handle(self, *args, **options):
self.state = options.get("state")
if not settings.TUTORIAL_MODE:
- self.log("Command cannot be run on environment's that are not in tutorial mode. "\
- " Maintenance mode "\
- " is currently implemented to the extent that it is required to facilitate"\
- " an environment reset on `tutorial` type servers and probably needs more work"\
- " to be useful on production.")
+ self.log(
+ "Command cannot be run on environment's that are not in tutorial mode. "
+ " Maintenance mode "
+ " is currently implemented to the extent that it is required to facilitate"
+ " an environment reset on `tutorial` type servers and probably needs more work"
+ " to be useful on production."
+ )
return
-
self.log("Setting maintenance mode {}".format(self.state))
if self.state == "on":
maintenance.on()
diff --git a/peeringdb_server/management/commands/pdb_process_admin_tool_command.py b/peeringdb_server/management/commands/pdb_process_admin_tool_command.py
index 174e8a0c..e7c04cb0 100644
--- a/peeringdb_server/management/commands/pdb_process_admin_tool_command.py
+++ b/peeringdb_server/management/commands/pdb_process_admin_tool_command.py
@@ -4,14 +4,19 @@
from peeringdb_server.models import CommandLineTool
from peeringdb_server.admin_commandline_tools import get_tool_from_data
+
class Command(BaseCommand):
help = "Processes one item in the admin tool command queue"
- def log(self,msg):
+ def log(self, msg):
self.stdout.write(msg)
def handle(self, *args, **options):
- command = CommandLineTool.objects.filter(status="waiting").order_by("-created").first()
+ command = (
+ CommandLineTool.objects.filter(status="waiting")
+ .order_by("-created")
+ .first()
+ )
if command:
self.log("Running {}".format(command))
@@ -19,7 +24,7 @@ def handle(self, *args, **options):
command.save()
try:
- tool = get_tool_from_data({"tool":command.tool})
+ tool = get_tool_from_data({"tool": command.tool})
arguments = json.loads(command.arguments)
tool.kwargs = arguments.get("kwargs")
tool.args = arguments.get("args")
@@ -29,4 +34,3 @@ def handle(self, *args, **options):
command.status = "done"
command.result = "Command ended with error: {}".format(exc)
command.save()
-
diff --git a/peeringdb_server/management/commands/pdb_renumber_lans.py b/peeringdb_server/management/commands/pdb_renumber_lans.py
index 8ecdbfaf..299b6e86 100644
--- a/peeringdb_server/management/commands/pdb_renumber_lans.py
+++ b/peeringdb_server/management/commands/pdb_renumber_lans.py
@@ -13,11 +13,14 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- '--commit', action='store_true',
- help="commit changes, otherwise run in pretend mode")
+ "--commit",
+ action="store_true",
+ help="commit changes, otherwise run in pretend mode",
+ )
parser.add_argument(
- '--ixlan', default=0,
- help="ixlan id, if set only renumber matches in this specific ixlan"
+ "--ixlan",
+ default=0,
+ help="ixlan id, if set only renumber matches in this specific ixlan",
)
parser.add_argument("ix", nargs="?", type=int)
parser.add_argument("old", nargs="?", type=str)
@@ -39,10 +42,15 @@ def renumber_lans(self, old, new):
new_prefix = ipaddress.ip_network(new)
if old_prefix.version != new_prefix.version:
- self.log("[error] {}".format("New prefix needs to be of same " \
- "protocol as old prefix"))
-
- prefixes = IXLanPrefix.objects.filter(prefix=old, ixlan__ix_id=self.ix, status="ok")
+ self.log(
+ "[error] {}".format(
+ "New prefix needs to be of same " "protocol as old prefix"
+ )
+ )
+
+ prefixes = IXLanPrefix.objects.filter(
+ prefix=old, ixlan__ix_id=self.ix, status="ok"
+ )
netixlans = NetworkIXLan.objects.filter(ixlan__ix_id=self.ix, status="ok")
if self.ixlan:
@@ -66,15 +74,17 @@ def renumber_lans(self, old, new):
self.log("[error] {}: {}".format(old_addr, exc))
continue
- self.log("Renumbering {} -> {}".format(
- netixlan.descriptive_name_ipv(new_addr.version), new_addr))
+ self.log(
+ "Renumbering {} -> {}".format(
+ netixlan.descriptive_name_ipv(new_addr.version), new_addr
+ )
+ )
if new_addr.version == 4:
netixlan.ipaddr4 = new_addr
else:
netixlan.ipaddr6 = new_addr
-
try:
netixlan.full_clean()
except ValidationError as exc:
diff --git a/peeringdb_server/management/commands/pdb_reversion_inspect.py b/peeringdb_server/management/commands/pdb_reversion_inspect.py
index 6843021d..ee76dfb9 100644
--- a/peeringdb_server/management/commands/pdb_reversion_inspect.py
+++ b/peeringdb_server/management/commands/pdb_reversion_inspect.py
@@ -5,9 +5,16 @@
from reversion.models import Version
MODELS = [
- pdbm.Organization, pdbm.Network, pdbm.InternetExchange,
- pdbm.InternetExchangeFacility, pdbm.Facility, pdbm.NetworkContact,
- pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix, pdbm.NetworkIXLan
+ pdbm.Organization,
+ pdbm.Network,
+ pdbm.InternetExchange,
+ pdbm.InternetExchangeFacility,
+ pdbm.Facility,
+ pdbm.NetworkContact,
+ pdbm.NetworkFacility,
+ pdbm.IXLan,
+ pdbm.IXLanPrefix,
+ pdbm.NetworkIXLan,
]
@@ -32,7 +39,7 @@ def handle(self, *args, **options):
ids = [int(i) for i in options.get("id")]
- print(ref_tag, ids)
+ print (ref_tag, ids)
model = None
for m in MODELS:
@@ -47,8 +54,8 @@ def handle(self, *args, **options):
content_type = ContentType.objects.get_for_model(model)
for id in ids:
versions = Version.objects.filter(
- content_type=content_type,
- object_id=id).order_by('revision_id')
+ content_type=content_type, object_id=id
+ ).order_by("revision_id")
print "%s - %d:" % (ref_tag, id)
self.print_line()
prev = {}
@@ -57,7 +64,11 @@ def handle(self, *args, **options):
data = json.loads(version.serialized_data)[0].get("fields")
n += 1
print "VERSION: %d (%d) - %s - User: %s" % (
- n, version.id, data.get("updated"), version.revision.user)
+ n,
+ version.id,
+ data.get("updated"),
+ version.revision.user,
+ )
if not prev:
for k, v in data.items():
print "%s: '%s'" % (k, v)
diff --git a/peeringdb_server/management/commands/pdb_sponsorship_notify.py b/peeringdb_server/management/commands/pdb_sponsorship_notify.py
index a384d942..99c2aad1 100644
--- a/peeringdb_server/management/commands/pdb_sponsorship_notify.py
+++ b/peeringdb_server/management/commands/pdb_sponsorship_notify.py
@@ -13,7 +13,10 @@ def handle(self, *args, **options):
now = datetime.datetime.now()
for sponsorship in Sponsorship.objects.filter(end_date__lt=now):
- if sponsorship.notify_date is None or sponsorship.notify_date < sponsorship.end_date:
+ if (
+ sponsorship.notify_date is None
+ or sponsorship.notify_date < sponsorship.end_date
+ ):
b = sponsorship.notify_expiration()
- #if b:
+ # if b:
# self.log("Sent expiration notices for %s, expired on %s" % (sponsorship.org.name, sponsorship.end_date))
diff --git a/peeringdb_server/management/commands/pdb_stats.py b/peeringdb_server/management/commands/pdb_stats.py
index dc9dfcb9..55df33e6 100644
--- a/peeringdb_server/management/commands/pdb_stats.py
+++ b/peeringdb_server/management/commands/pdb_stats.py
@@ -21,23 +21,27 @@ class Command(BaseCommand):
tags = ["fac", "ix", "net", "org"]
def add_arguments(self, parser):
- parser.add_argument("--date", action="store", default=None,
- help="generate stats for this date")
- parser.add_argument("--format", action="store", default="text",
- help="output format to use")
+ parser.add_argument(
+ "--date", action="store", default=None, help="generate stats for this date"
+ )
+ parser.add_argument(
+ "--format", action="store", default="text", help="output format to use"
+ )
def status_at_date(self, obj, dt):
versions = Version.objects.get_for_object(obj)
- version = versions.filter(revision__date_created__lte=dt).order_by(
- "-revision__date_created").first()
+ version = (
+ versions.filter(revision__date_created__lte=dt)
+ .order_by("-revision__date_created")
+ .first()
+ )
if version:
return version.field_dict["status"]
else:
return obj.status
-
def handle(self, *args, **options):
- date = options.get('date', None)
+ date = options.get("date", None)
if date:
dt = datetime.datetime.strptime(date, "%Y%m%d")
stats = self.generate_for_past_date(dt)
@@ -112,10 +116,8 @@ def generate_for_current_date(self):
model = REFTAG_MAP[tag]
stats[tag] = model.objects.filter(status="ok").count()
-
return {"stats": stats, "dt": dt}
-
def generate_for_past_date(self, dt):
"""
Generate and return stats for past date
@@ -140,4 +142,3 @@ def generate_for_past_date(self, dt):
stats[tag] += 1
return {"stats": stats, "dt": dt}
-
diff --git a/peeringdb_server/management/commands/pdb_status.py b/peeringdb_server/management/commands/pdb_status.py
index 6a8bec56..6623fa25 100644
--- a/peeringdb_server/management/commands/pdb_status.py
+++ b/peeringdb_server/management/commands/pdb_status.py
@@ -2,9 +2,16 @@
import peeringdb_server.models as pdbm
MODELS = [
- pdbm.Organization, pdbm.Network, pdbm.InternetExchange,
- pdbm.InternetExchangeFacility, pdbm.Facility, pdbm.NetworkContact,
- pdbm.NetworkFacility, pdbm.IXLan, pdbm.IXLanPrefix, pdbm.NetworkIXLan
+ pdbm.Organization,
+ pdbm.Network,
+ pdbm.InternetExchange,
+ pdbm.InternetExchangeFacility,
+ pdbm.Facility,
+ pdbm.NetworkContact,
+ pdbm.NetworkFacility,
+ pdbm.IXLan,
+ pdbm.IXLanPrefix,
+ pdbm.NetworkIXLan,
]
STATUS_TYPES = ["ok", "pending", "deleted"]
@@ -29,80 +36,110 @@ def handle(self, *args, **options):
counts = {}
for c in STATUS_TYPES:
counts[c] = model.objects.filter(status=c).count()
- counts["invalid"] = model.objects.exclude(
- status__in=STATUS_TYPES).count()
+ counts["invalid"] = model.objects.exclude(status__in=STATUS_TYPES).count()
- self.log(model.handleref.tag, " ".join(
- ["%s(%d)" % (k, v) for k, v in counts.items()]))
+ self.log(
+ model.handleref.tag,
+ " ".join(["%s(%d)" % (k, v) for k, v in counts.items()]),
+ )
# VERSION: print the id of the instances with the highest
# version for each model - this allows to spot possible import issues
self.print_line()
- self.log("version",
- "5 highest version numbers for each handleref type")
+ self.log("version", "5 highest version numbers for each handleref type")
self.print_line()
for model in MODELS:
inst = model.objects.order_by("-version")[:5]
- self.log(model.handleref.tag, ",".join(
- ["%d v=%d" % (o.id, o.version) for o in inst]))
+ self.log(
+ model.handleref.tag,
+ ",".join(["%d v=%d" % (o.id, o.version) for o in inst]),
+ )
# Find orphaned elements
- ixlan = pdbm.IXLan.objects.filter(
- status="ok", ix__status="deleted").select_related("ix").count()
+ ixlan = (
+ pdbm.IXLan.objects.filter(status="ok", ix__status="deleted")
+ .select_related("ix")
+ .count()
+ )
if ixlan > 0:
print "%d orphaned ixlans (ix status='deleted')" % ixlan
- ixfac = pdbm.InternetExchangeFacility.objects.filter(
- status="ok", ix__status="deleted").select_related("ix").count()
+ ixfac = (
+ pdbm.InternetExchangeFacility.objects.filter(
+ status="ok", ix__status="deleted"
+ )
+ .select_related("ix")
+ .count()
+ )
if ixfac > 0:
print "%d orphaned ixfacs (ix status='deleted')" % ixfac
- ixfac = pdbm.InternetExchangeFacility.objects.filter(
- status="ok",
- facility__status="deleted").select_related("facility").count()
+ ixfac = (
+ pdbm.InternetExchangeFacility.objects.filter(
+ status="ok", facility__status="deleted"
+ )
+ .select_related("facility")
+ .count()
+ )
if ixfac > 0:
print "%d orphaned ixfacs (fac status='deleted')" % ixfac
- netfac = pdbm.NetworkFacility.objects.filter(
- status="ok",
- network__status="deleted").select_related("network").count()
+ netfac = (
+ pdbm.NetworkFacility.objects.filter(status="ok", network__status="deleted")
+ .select_related("network")
+ .count()
+ )
if netfac > 0:
print "%d orphaned netfacs (net status='deleted')" % netfac
- netfac = pdbm.NetworkFacility.objects.filter(
- status="ok",
- facility__status="deleted").select_related("facility").count()
+ netfac = (
+ pdbm.NetworkFacility.objects.filter(status="ok", facility__status="deleted")
+ .select_related("facility")
+ .count()
+ )
if netfac > 0:
print "%d orphaned netfacs (fac status='deleted')" % netfac
- poc = pdbm.NetworkContact.objects.filter(
- status="ok",
- network__status="deleted").select_related("network").count()
+ poc = (
+ pdbm.NetworkContact.objects.filter(status="ok", network__status="deleted")
+ .select_related("network")
+ .count()
+ )
if poc > 0:
print "%d orphaned poc (net status='deleted')" % poc
- netixlan = pdbm.NetworkIXLan.objects.filter(
- status="ok",
- network__status="deleted").select_related("network").count()
+ netixlan = (
+ pdbm.NetworkIXLan.objects.filter(status="ok", network__status="deleted")
+ .select_related("network")
+ .count()
+ )
if netixlan > 0:
print "%d orphaned netixlans (net status='deleted')" % netixlan
- netixlan = pdbm.NetworkIXLan.objects.filter(
- status="ok",
- ixlan__status="deleted").select_related("ixlan").count()
+ netixlan = (
+ pdbm.NetworkIXLan.objects.filter(status="ok", ixlan__status="deleted")
+ .select_related("ixlan")
+ .count()
+ )
if netixlan > 0:
print "%d orphaned netixlans (ixlan status='deleted')" % netixlan
- ixpfx = pdbm.IXLanPrefix.objects.filter(
- status="ok",
- ixlan__status="deleted").select_related("ixlan").count()
+ ixpfx = (
+ pdbm.IXLanPrefix.objects.filter(status="ok", ixlan__status="deleted")
+ .select_related("ixlan")
+ .count()
+ )
if ixpfx:
print "%d orphaned ixpfxs (ixlan status='deleted')" % ixpfx
for model in [pdbm.Network, pdbm.InternetExchange, pdbm.Facility]:
- count = model.objects.filter(
- status="ok",
- org__status="deleted").select_related("org").count()
+ count = (
+ model.objects.filter(status="ok", org__status="deleted")
+ .select_related("org")
+ .count()
+ )
if count > 0:
print "%d orphaned %ss (org status='deleted')" % (
- count, model.handleref.tag)
+ count,
+ model.handleref.tag,
+ )
diff --git a/peeringdb_server/management/commands/pdb_undelete.py b/peeringdb_server/management/commands/pdb_undelete.py
index 89e06431..f8e9a865 100644
--- a/peeringdb_server/management/commands/pdb_undelete.py
+++ b/peeringdb_server/management/commands/pdb_undelete.py
@@ -11,13 +11,16 @@ class Command(BaseCommand):
help = "Undo object deletion"
def add_arguments(self, parser):
- parser.add_argument("reftag", nargs="?",
- help="object reftag (net, ix, fac etc..)")
+ parser.add_argument(
+ "reftag", nargs="?", help="object reftag (net, ix, fac etc..)"
+ )
parser.add_argument("id", nargs="?", help="object id")
- parser.add_argument("version_id", nargs="?",
- help="object version id where it was deleted")
- parser.add_argument('--commit', action='store_true',
- help="will commit the changes")
+ parser.add_argument(
+ "version_id", nargs="?", help="object version id where it was deleted"
+ )
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit the changes"
+ )
def log(self, msg):
if self.commit:
@@ -36,7 +39,8 @@ def handle(self, *args, **options):
self.version_id = options.get("version_id")
self.suppress_warning = None
self.version = version = reversion.models.Version.objects.get(
- id=self.version_id)
+ id=self.version_id
+ )
self.date = version.revision.date_created
self.log("UNDELETING FROM DATE: {}".format(self.date))
self.undelete(options.get("reftag"), options.get("id"))
@@ -47,15 +51,15 @@ def handle_netixlan(self, netixlan):
if conflict_ip4:
# ipv4 exists in another netixlan now
- others = model.objects.filter(ipaddr4=netixlan.ipaddr4,
- status="ok")
- for other in [
- o for o in others if o.ixlan.ix_id == netixlan.ixlan.ix_id
- ]:
+ others = model.objects.filter(ipaddr4=netixlan.ipaddr4, status="ok")
+ for other in [o for o in others if o.ixlan.ix_id == netixlan.ixlan.ix_id]:
# netixlan is at same ix as the one being undeleted, delete the other
# one so we can proceed with undeletion
- self.log("Found duplicate netixlan at same ix: {} - deleting".
- format(other.ipaddr4))
+ self.log(
+ "Found duplicate netixlan at same ix: {} - deleting".format(
+ other.ipaddr4
+ )
+ )
if self.commit:
other.delete()
else:
@@ -63,24 +67,22 @@ def handle_netixlan(self, netixlan):
# are not deleting the conflict
self.suppress_warning = True
- for other in [
- o for o in others if o.ixlan.ix_id != netixlan.ixlan.ix_id
- ]:
+ for other in [o for o in others if o.ixlan.ix_id != netixlan.ixlan.ix_id]:
# unless ipv4 also exists in a netixlan that is NOT at the same ix
# then we need the warning again
self.suppress_warning = False
if conflict_ip6:
# ipv6 exists in another netixlan now
- others = model.objects.filter(ipaddr6=netixlan.ipaddr6,
- status="ok")
- for other in [
- o for o in others if o.ixlan.ix_id == netixlan.ixlan.ix_id
- ]:
+ others = model.objects.filter(ipaddr6=netixlan.ipaddr6, status="ok")
+ for other in [o for o in others if o.ixlan.ix_id == netixlan.ixlan.ix_id]:
# netixlan is at same ix as the one being undeleted, delete the other
# one so we can proceed with undeletion
- self.log("Found duplicate netixlan at same ix: {} - deleting".
- format(other.ipaddr6))
+ self.log(
+ "Found duplicate netixlan at same ix: {} - deleting".format(
+ other.ipaddr6
+ )
+ )
if self.commit:
other.delete()
else:
@@ -88,9 +90,7 @@ def handle_netixlan(self, netixlan):
# are not deleting the conflict
self.suppress_warning = True
- for other in [
- o for o in others if o.ixlan.ix_id != netixlan.ixlan.ix_id
- ]:
+ for other in [o for o in others if o.ixlan.ix_id != netixlan.ixlan.ix_id]:
# unless ipv6 also exists in a netixlan that is NOT at the same ix
# then we need the warning again
self.suppress_warning = False
@@ -106,18 +106,22 @@ def _label(obj):
return obj
if date:
- version = reversion.models.Version.objects.get_for_object(
- obj).filter(revision__date_created__lt=date).order_by(
- "revision__date_created").last()
+ version = (
+ reversion.models.Version.objects.get_for_object(obj)
+ .filter(revision__date_created__lt=date)
+ .order_by("revision__date_created")
+ .last()
+ )
try:
- status = json.loads(
- version.serialized_data)[0].get("fields")["status"]
+ status = json.loads(version.serialized_data)[0].get("fields")["status"]
except:
status = None
if status == "deleted":
self.log_warn(
"{} was already deleted at snapshot, skipping ..".format(
- _label(obj)))
+ _label(obj)
+ )
+ )
return
can_undelete_obj = True
@@ -133,8 +137,10 @@ def _label(obj):
if relation.status == "deleted" and relation != parent:
can_undelete_obj = False
self.log_warn(
- "Cannot undelete {}, dependent relation marked as deleted: {}"
- .format(_label(obj), relation))
+ "Cannot undelete {}, dependent relation marked as deleted: {}".format(
+ _label(obj), relation
+ )
+ )
if not can_undelete_obj:
return
@@ -153,8 +159,7 @@ def _label(obj):
obj.save()
except Exception as exc:
if not self.suppress_warning:
- self.log_warn("Cannot undelete {}: {}".format(
- _label(obj), exc))
+ self.log_warn("Cannot undelete {}: {}".format(_label(obj), exc))
for field in cls._meta.get_fields():
if field.is_relation:
@@ -167,5 +172,4 @@ def _label(obj):
if not hasattr(field.related_model, "ref_tag"):
continue
for child in relation.filter(updated__gte=self.date):
- self.undelete(child.ref_tag, child.id, obj,
- date=self.date)
+ self.undelete(child.ref_tag, child.id, obj, date=self.date)
diff --git a/peeringdb_server/management/commands/pdb_whois.py b/peeringdb_server/management/commands/pdb_whois.py
index d2011367..1d5d2aef 100644
--- a/peeringdb_server/management/commands/pdb_whois.py
+++ b/peeringdb_server/management/commands/pdb_whois.py
@@ -13,14 +13,14 @@
class Command(DBCommand):
- args = ''
- help = 'command line whois'
+ args = ""
+ help = "command line whois"
def add_arguments(self, parser):
parser.add_argument("ref", nargs="?", type=str)
def handle(self, *args, **options):
- log = logging.getLogger('pdb.script.whois')
+ log = logging.getLogger("pdb.script.whois")
# FIXME - ignore multiple args for now
args = options.get("ref")
@@ -31,7 +31,7 @@ def handle(self, *args, **options):
log.error("Unknown query type '%s'" % (args))
return
# TODO
- raise CommandError('unk query')
+ raise CommandError("unk query")
model = None
@@ -40,26 +40,23 @@ def handle(self, *args, **options):
if ref_tag in models.REFTAG_MAP:
model = models.REFTAG_MAP[ref_tag]
Serializer = serializers.REFTAG_MAP[ref_tag]
- obj = Serializer.prefetch_related(model.objects, None,
- depth=2).get(pk=pk)
+ obj = Serializer.prefetch_related(model.objects, None, depth=2).get(pk=pk)
- elif ref_tag == 'as':
- model = models.REFTAG_MAP['net']
- Serializer = serializers.REFTAG_MAP['net']
- obj = Serializer.prefetch_related(model.objects, None,
- depth=2).get(asn=pk)
+ elif ref_tag == "as":
+ model = models.REFTAG_MAP["net"]
+ Serializer = serializers.REFTAG_MAP["net"]
+ obj = Serializer.prefetch_related(model.objects, None, depth=2).get(asn=pk)
+ # data = cls(obj).data
-# data = cls(obj).data
-
-# TODO doesn't work on client
-# elif ref_tag == 'ixnets':
+ # TODO doesn't work on client
+ # elif ref_tag == 'ixnets':
if not model:
msg = "Unknown ref tag: {}".format(ref_tag)
log.error("Unknown ref tag: %s" % ref_tag)
raise ValueError(msg)
- data = Serializer(obj, context={"user":AnonymousUser()}).data
+ data = Serializer(obj, context={"user": AnonymousUser()}).data
fmt = WhoisFormat()
- fmt. print(obj._handleref.tag, data)
+ fmt.print(obj._handleref.tag, data)
diff --git a/peeringdb_server/management/commands/pdb_wipe.py b/peeringdb_server/management/commands/pdb_wipe.py
index fbfd3c48..5dfff52e 100644
--- a/peeringdb_server/management/commands/pdb_wipe.py
+++ b/peeringdb_server/management/commands/pdb_wipe.py
@@ -1,26 +1,38 @@
from django.core.management.base import BaseCommand
-from peeringdb_server.models import REFTAG_MAP, NetworkContact, User, Sponsorship, Partnership
+from peeringdb_server.models import (
+ REFTAG_MAP,
+ NetworkContact,
+ User,
+ Sponsorship,
+ Partnership,
+)
from django.core.management import call_command
from django.contrib.auth.models import Group
from django.conf import settings
+
class Command(BaseCommand):
help = "Wipe all peering data, including users - superusers will be kept - cannot be used in production environments"
def add_arguments(self, parser):
- parser.add_argument('--commit', action='store_true',
- help="will commit the changes")
-
- parser.add_argument('--keep-users', action='store_true',
- help="do not delete users")
-
- parser.add_argument('--load-data', action='store_true',
- help="load data after wipe")
+ parser.add_argument(
+ "--commit", action="store_true", help="will commit the changes"
+ )
- parser.add_argument('--load-data-url', type=str, default="https://www.peeringdb.com/api",
- help="load data from here")
+ parser.add_argument(
+ "--keep-users", action="store_true", help="do not delete users"
+ )
+ parser.add_argument(
+ "--load-data", action="store_true", help="load data after wipe"
+ )
+ parser.add_argument(
+ "--load-data-url",
+ type=str,
+ default="https://www.peeringdb.com/api",
+ help="load data from here",
+ )
def log(self, msg):
if self.commit:
@@ -67,5 +79,9 @@ def handle(self, *args, **options):
self.log("Cleared seassions")
if self.load_data:
- call_command("pdb_load_data", commit=self.commit,
- url=self.load_data_url, stdout=self.stdout)
+ call_command(
+ "pdb_load_data",
+ commit=self.commit,
+ url=self.load_data_url,
+ stdout=self.stdout,
+ )
diff --git a/peeringdb_server/migrations/0001_initial.py b/peeringdb_server/migrations/0001_initial.py
index 4e5147d4..a18b9976 100644
--- a/peeringdb_server/migrations/0001_initial.py
+++ b/peeringdb_server/migrations/0001_initial.py
@@ -21,825 +21,1159 @@ class Migration(migrations.Migration):
initial = True
dependencies = [
- ('contenttypes', '0002_remove_content_type_name'),
- ('auth', '0008_alter_user_username_max_length'),
+ ("contenttypes", "0002_remove_content_type_name"),
+ ("auth", "0008_alter_user_username_max_length"),
]
operations = [
migrations.CreateModel(
- name='User',
+ name="User",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('password',
- models.CharField(max_length=128, verbose_name='password')),
- ('last_login',
- models.DateTimeField(blank=True, null=True,
- verbose_name='last login')),
- ('is_superuser',
- models.BooleanField(
- default=False, help_text=
- 'Designates that this user has all permissions without explicitly assigning them.',
- verbose_name='superuser status')),
- ('username',
- models.CharField(
- help_text='Required. Letters, digits and [@.+-/_=|] only.',
- max_length=254, unique=True, validators=[
- django.core.validators.RegexValidator(
- b'^[\\w\\.@+-=|/]+$', 'Enter a valid username.',
- b'invalid', flags=32)
- ], verbose_name='username')),
- ('email',
- models.EmailField(max_length=254,
- verbose_name='email address')),
- ('first_name',
- models.CharField(blank=True, max_length=254,
- verbose_name='first name')),
- ('last_name',
- models.CharField(blank=True, max_length=254,
- verbose_name='last name')),
- ('is_staff',
- models.BooleanField(
- default=False, help_text=
- 'Designates whether the user can log into admin site.',
- verbose_name='staff status')),
- ('is_active',
- models.BooleanField(
- default=True, help_text=
- 'Designates whether this user should be treated as active. Unselect this instead of deleting accounts.',
- verbose_name='active')),
- ('date_joined',
- models.DateTimeField(default=django.utils.timezone.now,
- verbose_name='date joined')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('status',
- models.CharField(default=b'ok', max_length=254,
- verbose_name='status')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("password", models.CharField(max_length=128, verbose_name="password")),
+ (
+ "last_login",
+ models.DateTimeField(
+ blank=True, null=True, verbose_name="last login"
+ ),
+ ),
+ (
+ "is_superuser",
+ models.BooleanField(
+ default=False,
+ help_text="Designates that this user has all permissions without explicitly assigning them.",
+ verbose_name="superuser status",
+ ),
+ ),
+ (
+ "username",
+ models.CharField(
+ help_text="Required. Letters, digits and [@.+-/_=|] only.",
+ max_length=254,
+ unique=True,
+ validators=[
+ django.core.validators.RegexValidator(
+ b"^[\\w\\.@+-=|/]+$",
+ "Enter a valid username.",
+ b"invalid",
+ flags=32,
+ )
+ ],
+ verbose_name="username",
+ ),
+ ),
+ (
+ "email",
+ models.EmailField(max_length=254, verbose_name="email address"),
+ ),
+ (
+ "first_name",
+ models.CharField(
+ blank=True, max_length=254, verbose_name="first name"
+ ),
+ ),
+ (
+ "last_name",
+ models.CharField(
+ blank=True, max_length=254, verbose_name="last name"
+ ),
+ ),
+ (
+ "is_staff",
+ models.BooleanField(
+ default=False,
+ help_text="Designates whether the user can log into admin site.",
+ verbose_name="staff status",
+ ),
+ ),
+ (
+ "is_active",
+ models.BooleanField(
+ default=True,
+ help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
+ verbose_name="active",
+ ),
+ ),
+ (
+ "date_joined",
+ models.DateTimeField(
+ default=django.utils.timezone.now, verbose_name="date joined"
+ ),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ (
+ "status",
+ models.CharField(
+ default=b"ok", max_length=254, verbose_name="status"
+ ),
+ ),
],
options={
- 'db_table': 'peeringdb_user',
- 'verbose_name': 'user',
- 'verbose_name_plural': 'users',
+ "db_table": "peeringdb_user",
+ "verbose_name": "user",
+ "verbose_name_plural": "users",
},
- managers=[
- ('objects', django.contrib.auth.models.UserManager()),
- ],
+ managers=[("objects", django.contrib.auth.models.UserManager()),],
),
migrations.CreateModel(
- name='Facility',
+ name="Facility",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('address1', models.CharField(blank=True, max_length=255)),
- ('address2', models.CharField(blank=True, max_length=255)),
- ('city', models.CharField(blank=True, max_length=255)),
- ('state', models.CharField(blank=True, max_length=255)),
- ('zipcode', models.CharField(blank=True, max_length=48)),
- ('country',
- django_countries.fields.CountryField(blank=True,
- max_length=2)),
- ('name', models.CharField(max_length=255, unique=True)),
- ('website',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('clli', models.CharField(blank=True, max_length=18)),
- ('rencode', models.CharField(blank=True, max_length=18)),
- ('npanxx', models.CharField(blank=True, max_length=21)),
- ('notes', models.TextField(blank=True)),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("address1", models.CharField(blank=True, max_length=255)),
+ ("address2", models.CharField(blank=True, max_length=255)),
+ ("city", models.CharField(blank=True, max_length=255)),
+ ("state", models.CharField(blank=True, max_length=255)),
+ ("zipcode", models.CharField(blank=True, max_length=48)),
+ (
+ "country",
+ django_countries.fields.CountryField(blank=True, max_length=2),
+ ),
+ ("name", models.CharField(max_length=255, unique=True)),
+ (
+ "website",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ ("clli", models.CharField(blank=True, max_length=18)),
+ ("rencode", models.CharField(blank=True, max_length=18)),
+ ("npanxx", models.CharField(blank=True, max_length=21)),
+ ("notes", models.TextField(blank=True)),
],
options={
- 'abstract': False,
- 'db_table': 'peeringdb_facility',
- 'verbose_name_plural': 'Facilities',
+ "abstract": False,
+ "db_table": "peeringdb_facility",
+ "verbose_name_plural": "Facilities",
},
- managers=[
- ('handleref', django.db.models.manager.Manager()),
- ],
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='InternetExchange',
+ name="InternetExchange",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('name', models.CharField(max_length=64, unique=True)),
- ('name_long', models.CharField(blank=True, max_length=254)),
- ('city', models.CharField(max_length=192)),
- ('country',
- django_countries.fields.CountryField(max_length=2)),
- ('notes', models.TextField(blank=True)),
- ('region_continent',
- models.CharField(
- choices=[(b'North America', b'North America'),
- (b'Asia Pacific', b'Asia Pacific'), (b'Europe',
- b'Europe'),
- (b'South America',
- b'South America'), (b'Africa', b'Africa'),
- (b'Australia', b'Australia'), (b'Middle East',
- b'Middle East')],
- max_length=255)),
- ('media',
- models.CharField(
- choices=[(b'Ethernet', b'Ethernet'), (b'ATM', b'ATM'),
- (b'Multiple', b'Multiple')], max_length=128)),
- ('proto_unicast', models.BooleanField(default=False)),
- ('proto_multicast', models.BooleanField(default=False)),
- ('proto_ipv6', models.BooleanField(default=False)),
- ('website',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('url_stats',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('tech_email', models.EmailField(blank=True, max_length=254)),
- ('tech_phone', models.CharField(blank=True, max_length=192)),
- ('policy_email', models.EmailField(blank=True,
- max_length=254)),
- ('policy_phone', models.CharField(blank=True, max_length=192)),
- ],
- options={
- 'abstract': False,
- 'db_table': 'peeringdb_ix',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("name", models.CharField(max_length=64, unique=True)),
+ ("name_long", models.CharField(blank=True, max_length=254)),
+ ("city", models.CharField(max_length=192)),
+ ("country", django_countries.fields.CountryField(max_length=2)),
+ ("notes", models.TextField(blank=True)),
+ (
+ "region_continent",
+ models.CharField(
+ choices=[
+ (b"North America", b"North America"),
+ (b"Asia Pacific", b"Asia Pacific"),
+ (b"Europe", b"Europe"),
+ (b"South America", b"South America"),
+ (b"Africa", b"Africa"),
+ (b"Australia", b"Australia"),
+ (b"Middle East", b"Middle East"),
+ ],
+ max_length=255,
+ ),
+ ),
+ (
+ "media",
+ models.CharField(
+ choices=[
+ (b"Ethernet", b"Ethernet"),
+ (b"ATM", b"ATM"),
+ (b"Multiple", b"Multiple"),
+ ],
+ max_length=128,
+ ),
+ ),
+ ("proto_unicast", models.BooleanField(default=False)),
+ ("proto_multicast", models.BooleanField(default=False)),
+ ("proto_ipv6", models.BooleanField(default=False)),
+ (
+ "website",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ (
+ "url_stats",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ ("tech_email", models.EmailField(blank=True, max_length=254)),
+ ("tech_phone", models.CharField(blank=True, max_length=192)),
+ ("policy_email", models.EmailField(blank=True, max_length=254)),
+ ("policy_phone", models.CharField(blank=True, max_length=192)),
],
+ options={"abstract": False, "db_table": "peeringdb_ix",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='InternetExchangeFacility',
+ name="InternetExchangeFacility",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('facility',
- models.ForeignKey(default=0,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='ixfac_set',
- to='peeringdb_server.Facility')),
- ('ix',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='ixfac_set',
- to='peeringdb_server.InternetExchange')),
- ],
- options={
- 'db_table': 'peeringdb_ix_facility',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ (
+ "facility",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixfac_set",
+ to="peeringdb_server.Facility",
+ ),
+ ),
+ (
+ "ix",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixfac_set",
+ to="peeringdb_server.InternetExchange",
+ ),
+ ),
],
+ options={"db_table": "peeringdb_ix_facility",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='IXLan',
+ name="IXLan",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('name', models.CharField(blank=True, max_length=255)),
- ('descr', models.TextField(blank=True)),
- ('mtu', models.PositiveIntegerField(blank=True, null=True)),
- ('vlan', models.PositiveIntegerField(blank=True, null=True)),
- ('dot1q_support', models.BooleanField(default=False)),
- ('rs_asn',
- django_inet.models.ASNField(blank=True, default=0,
- null=True)),
- ('arp_sponge',
- django_inet.models.MacAddressField(blank=True, max_length=17,
- null=True, unique=True)),
- ('ix',
- models.ForeignKey(default=0,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='ixlan_set',
- to='peeringdb_server.InternetExchange')),
- ],
- options={
- 'db_table': 'peeringdb_ixlan',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("name", models.CharField(blank=True, max_length=255)),
+ ("descr", models.TextField(blank=True)),
+ ("mtu", models.PositiveIntegerField(blank=True, null=True)),
+ ("vlan", models.PositiveIntegerField(blank=True, null=True)),
+ ("dot1q_support", models.BooleanField(default=False)),
+ (
+ "rs_asn",
+ django_inet.models.ASNField(blank=True, default=0, null=True),
+ ),
+ (
+ "arp_sponge",
+ django_inet.models.MacAddressField(
+ blank=True, max_length=17, null=True, unique=True
+ ),
+ ),
+ (
+ "ix",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixlan_set",
+ to="peeringdb_server.InternetExchange",
+ ),
+ ),
],
+ options={"db_table": "peeringdb_ixlan",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='IXLanPrefix',
+ name="IXLanPrefix",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('notes', models.CharField(blank=True, max_length=255)),
- ('protocol',
- models.CharField(choices=[(b'IPv4', b'IPv4'), (b'IPv6',
- b'IPv6')],
- max_length=64)),
- ('prefix',
- django_inet.models.IPPrefixField(max_length=43, unique=True)),
- ('ixlan',
- models.ForeignKey(
- default=0, on_delete=django.db.models.deletion.CASCADE,
- related_name='ixpfx_set', to='peeringdb_server.IXLan')),
- ],
- options={
- 'abstract': False,
- 'db_table': 'peeringdb_ixlan_prefix',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("notes", models.CharField(blank=True, max_length=255)),
+ (
+ "protocol",
+ models.CharField(
+ choices=[(b"IPv4", b"IPv4"), (b"IPv6", b"IPv6")], max_length=64
+ ),
+ ),
+ (
+ "prefix",
+ django_inet.models.IPPrefixField(max_length=43, unique=True),
+ ),
+ (
+ "ixlan",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixpfx_set",
+ to="peeringdb_server.IXLan",
+ ),
+ ),
],
+ options={"abstract": False, "db_table": "peeringdb_ixlan_prefix",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='Network',
+ name="Network",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('asn', django_inet.models.ASNField(unique=True)),
- ('name', models.CharField(max_length=255, unique=True)),
- ('aka', models.CharField(blank=True, max_length=255)),
- ('irr_as_set', models.CharField(blank=True, max_length=255)),
- ('website',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('looking_glass',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('route_server',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('notes', models.TextField(blank=True)),
- ('notes_private', models.TextField(blank=True)),
- ('info_traffic',
- models.CharField(blank=True, choices=[
- (b'', b'Not Disclosed'), (b'0-20 Mbps', b'0-20 Mbps'),
- (b'20-100Mbps', b'20-100Mbps'), (b'100-1000Mbps',
- b'100-1000Mbps'),
- (b'1-5Gbps', b'1-5Gbps'), (b'5-10Gbps',
- b'5-10Gbps'), (b'10-20Gbps',
- b'10-20Gbps'),
- (b'20-50 Gbps',
- b'20-50 Gbps'), (b'50-100 Gbps',
- b'50-100 Gbps'), (b'100+ Gbps',
- b'100+ Gbps'),
- (b'100-200 Gbps',
- b'100-200 Gbps'), (b'200-300 Gbps',
- b'200-300 Gbps'), (b'300-500 Gbps',
- b'300-500 Gbps'),
- (b'500-1000 Gbps',
- b'500-1000 Gbps'), (b'1 Tbps+',
- b'1 Tbps+'), (b'10 Tbps+',
- b'10 Tbps+')
- ], max_length=39)),
- ('info_ratio',
- models.CharField(blank=True, choices=[
- (b'', b'Not Disclosed'), (b'Not Disclosed',
- b'Not Disclosed'),
- (b'Heavy Outbound',
- b'Heavy Outbound'), (b'Mostly Outbound',
- b'Mostly Outbound'), (b'Balanced',
- b'Balanced'),
- (b'Mostly Inbound', b'Mostly Inbound'), (b'Heavy Inbound',
- b'Heavy Inbound')
- ], default=b'Not Disclosed', max_length=45)),
- ('info_scope',
- models.CharField(blank=True, choices=[
- (b'', b'Not Disclosed'), (b'Not Disclosed',
- b'Not Disclosed'),
- (b'Regional',
- b'Regional'), (b'North America',
- b'North America'), (b'Asia Pacific',
- b'Asia Pacific'),
- (b'Europe', b'Europe'), (b'South America',
- b'South America'), (b'Africa',
- b'Africa'),
- (b'Australia',
- b'Australia'), (b'Middle East',
- b'Middle East'), (b'Global', b'Global')
- ], default=b'Not Disclosed', max_length=39)),
- ('info_type',
- models.CharField(blank=True, choices=[
- (b'', b'Not Disclosed'), (b'Not Disclosed',
- b'Not Disclosed'),
- (b'NSP', b'NSP'), (b'Content',
- b'Content'), (b'Cable/DSL/ISP',
- b'Cable/DSL/ISP'),
- (b'Enterprise', b'Enterprise'), (b'Educational/Research',
- b'Educational/Research'),
- (b'Non-Profit', b'Non-Profit'), (b'Route Server',
- b'Route Server')
- ], default=b'Not Disclosed', max_length=60)),
- ('info_prefixes4',
- models.PositiveIntegerField(blank=True, null=True)),
- ('info_prefixes6',
- models.PositiveIntegerField(blank=True, null=True)),
- ('info_unicast', models.BooleanField(default=False)),
- ('info_multicast', models.BooleanField(default=False)),
- ('info_ipv6', models.BooleanField(default=False)),
- ('policy_url',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('policy_general',
- models.CharField(blank=True,
- choices=[(b'Open', b'Open'), (b'Selective',
- b'Selective'),
- (b'Restrictive', b'Restrictive'),
- (b'No', b'No')], max_length=72)),
- ('policy_locations',
- models.CharField(
- blank=True,
- choices=[(b'Not Required',
- b'Not Required'), (b'Preferred', b'Preferred'),
- (b'Required - US',
- b'Required - US'), (b'Required - EU',
- b'Required - EU'),
- (b'Required - International',
- b'Required - International')], max_length=72)),
- ('policy_ratio', models.BooleanField(default=False)),
- ('policy_contracts',
- models.CharField(blank=True,
- choices=[(b'Not Required', b'Not Required'),
- (b'Private Only', b'Private Only'),
- (b'Required',
- b'Required')], max_length=36)),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("asn", django_inet.models.ASNField(unique=True)),
+ ("name", models.CharField(max_length=255, unique=True)),
+ ("aka", models.CharField(blank=True, max_length=255)),
+ ("irr_as_set", models.CharField(blank=True, max_length=255)),
+ (
+ "website",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ (
+ "looking_glass",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ (
+ "route_server",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ ("notes", models.TextField(blank=True)),
+ ("notes_private", models.TextField(blank=True)),
+ (
+ "info_traffic",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"", b"Not Disclosed"),
+ (b"0-20 Mbps", b"0-20 Mbps"),
+ (b"20-100Mbps", b"20-100Mbps"),
+ (b"100-1000Mbps", b"100-1000Mbps"),
+ (b"1-5Gbps", b"1-5Gbps"),
+ (b"5-10Gbps", b"5-10Gbps"),
+ (b"10-20Gbps", b"10-20Gbps"),
+ (b"20-50 Gbps", b"20-50 Gbps"),
+ (b"50-100 Gbps", b"50-100 Gbps"),
+ (b"100+ Gbps", b"100+ Gbps"),
+ (b"100-200 Gbps", b"100-200 Gbps"),
+ (b"200-300 Gbps", b"200-300 Gbps"),
+ (b"300-500 Gbps", b"300-500 Gbps"),
+ (b"500-1000 Gbps", b"500-1000 Gbps"),
+ (b"1 Tbps+", b"1 Tbps+"),
+ (b"10 Tbps+", b"10 Tbps+"),
+ ],
+ max_length=39,
+ ),
+ ),
+ (
+ "info_ratio",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"", b"Not Disclosed"),
+ (b"Not Disclosed", b"Not Disclosed"),
+ (b"Heavy Outbound", b"Heavy Outbound"),
+ (b"Mostly Outbound", b"Mostly Outbound"),
+ (b"Balanced", b"Balanced"),
+ (b"Mostly Inbound", b"Mostly Inbound"),
+ (b"Heavy Inbound", b"Heavy Inbound"),
+ ],
+ default=b"Not Disclosed",
+ max_length=45,
+ ),
+ ),
+ (
+ "info_scope",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"", b"Not Disclosed"),
+ (b"Not Disclosed", b"Not Disclosed"),
+ (b"Regional", b"Regional"),
+ (b"North America", b"North America"),
+ (b"Asia Pacific", b"Asia Pacific"),
+ (b"Europe", b"Europe"),
+ (b"South America", b"South America"),
+ (b"Africa", b"Africa"),
+ (b"Australia", b"Australia"),
+ (b"Middle East", b"Middle East"),
+ (b"Global", b"Global"),
+ ],
+ default=b"Not Disclosed",
+ max_length=39,
+ ),
+ ),
+ (
+ "info_type",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"", b"Not Disclosed"),
+ (b"Not Disclosed", b"Not Disclosed"),
+ (b"NSP", b"NSP"),
+ (b"Content", b"Content"),
+ (b"Cable/DSL/ISP", b"Cable/DSL/ISP"),
+ (b"Enterprise", b"Enterprise"),
+ (b"Educational/Research", b"Educational/Research"),
+ (b"Non-Profit", b"Non-Profit"),
+ (b"Route Server", b"Route Server"),
+ ],
+ default=b"Not Disclosed",
+ max_length=60,
+ ),
+ ),
+ ("info_prefixes4", models.PositiveIntegerField(blank=True, null=True)),
+ ("info_prefixes6", models.PositiveIntegerField(blank=True, null=True)),
+ ("info_unicast", models.BooleanField(default=False)),
+ ("info_multicast", models.BooleanField(default=False)),
+ ("info_ipv6", models.BooleanField(default=False)),
+ (
+ "policy_url",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ (
+ "policy_general",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"Open", b"Open"),
+ (b"Selective", b"Selective"),
+ (b"Restrictive", b"Restrictive"),
+ (b"No", b"No"),
+ ],
+ max_length=72,
+ ),
+ ),
+ (
+ "policy_locations",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"Not Required", b"Not Required"),
+ (b"Preferred", b"Preferred"),
+ (b"Required - US", b"Required - US"),
+ (b"Required - EU", b"Required - EU"),
+ (b"Required - International", b"Required - International"),
+ ],
+ max_length=72,
+ ),
+ ),
+ ("policy_ratio", models.BooleanField(default=False)),
+ (
+ "policy_contracts",
+ models.CharField(
+ blank=True,
+ choices=[
+ (b"Not Required", b"Not Required"),
+ (b"Private Only", b"Private Only"),
+ (b"Required", b"Required"),
+ ],
+ max_length=36,
+ ),
+ ),
],
options={
- 'abstract': False,
- 'db_table': 'peeringdb_network',
- 'verbose_name_plural': 'Networks',
+ "abstract": False,
+ "db_table": "peeringdb_network",
+ "verbose_name_plural": "Networks",
},
- managers=[
- ('handleref', django.db.models.manager.Manager()),
- ],
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='NetworkContact',
+ name="NetworkContact",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('role',
- models.CharField(
- choices=[(b'Abuse', b'Abuse'), (b'Maintenance',
- b'Maintenance'),
- (b'Policy', b'Policy'), (b'Technical',
- b'Technical'),
- (b'NOC', b'NOC'), (b'Public Relations',
- b'Public Relations'),
- (b'Sales', b'Sales')], max_length=27)),
- ('visible',
- models.CharField(choices=[(b'Private', b'Private'),
- (b'Users', b'Users'), (b'Public',
- b'Public')],
- default=b'Public', max_length=64)),
- ('name', models.CharField(blank=True, max_length=254)),
- ('phone', models.CharField(blank=True, max_length=100)),
- ('email', models.EmailField(blank=True, max_length=254)),
- ('url',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('network',
- models.ForeignKey(
- default=0, on_delete=django.db.models.deletion.CASCADE,
- related_name='poc_set', to='peeringdb_server.Network')),
- ],
- options={
- 'db_table': 'peeringdb_network_contact',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ (
+ "role",
+ models.CharField(
+ choices=[
+ (b"Abuse", b"Abuse"),
+ (b"Maintenance", b"Maintenance"),
+ (b"Policy", b"Policy"),
+ (b"Technical", b"Technical"),
+ (b"NOC", b"NOC"),
+ (b"Public Relations", b"Public Relations"),
+ (b"Sales", b"Sales"),
+ ],
+ max_length=27,
+ ),
+ ),
+ (
+ "visible",
+ models.CharField(
+ choices=[
+ (b"Private", b"Private"),
+ (b"Users", b"Users"),
+ (b"Public", b"Public"),
+ ],
+ default=b"Public",
+ max_length=64,
+ ),
+ ),
+ ("name", models.CharField(blank=True, max_length=254)),
+ ("phone", models.CharField(blank=True, max_length=100)),
+ ("email", models.EmailField(blank=True, max_length=254)),
+ (
+ "url",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ (
+ "network",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="poc_set",
+ to="peeringdb_server.Network",
+ ),
+ ),
],
+ options={"db_table": "peeringdb_network_contact",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='NetworkFacility',
+ name="NetworkFacility",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('local_asn', django_inet.models.ASNField(
- blank=True, null=True)),
- ('avail_sonet', models.BooleanField(default=False)),
- ('avail_ethernet', models.BooleanField(default=False)),
- ('avail_atm', models.BooleanField(default=False)),
- ('facility',
- models.ForeignKey(default=0,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='netfac_set',
- to='peeringdb_server.Facility')),
- ('network',
- models.ForeignKey(default=0,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='netfac_set',
- to='peeringdb_server.Network')),
- ],
- options={
- 'db_table': 'peeringdb_network_facility',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("local_asn", django_inet.models.ASNField(blank=True, null=True)),
+ ("avail_sonet", models.BooleanField(default=False)),
+ ("avail_ethernet", models.BooleanField(default=False)),
+ ("avail_atm", models.BooleanField(default=False)),
+ (
+ "facility",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="netfac_set",
+ to="peeringdb_server.Facility",
+ ),
+ ),
+ (
+ "network",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="netfac_set",
+ to="peeringdb_server.Network",
+ ),
+ ),
],
+ options={"db_table": "peeringdb_network_facility",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='NetworkIXLan',
+ name="NetworkIXLan",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('asn', django_inet.models.ASNField()),
- ('ipaddr4',
- django_inet.models.IPAddressField(blank=True, max_length=39,
- null=True)),
- ('ipaddr6',
- django_inet.models.IPAddressField(blank=True, max_length=39,
- null=True)),
- ('is_rs_peer', models.BooleanField(default=False)),
- ('notes', models.CharField(blank=True, max_length=255)),
- ('speed', models.PositiveIntegerField()),
- ('ixlan',
- models.ForeignKey(default=0,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='netixlan_set',
- to='peeringdb_server.IXLan')),
- ('network',
- models.ForeignKey(default=0,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='netixlan_set',
- to='peeringdb_server.Network')),
- ],
- options={
- 'db_table': 'peeringdb_network_ixlan',
- },
- managers=[
- ('handleref', django.db.models.manager.Manager()),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("asn", django_inet.models.ASNField()),
+ (
+ "ipaddr4",
+ django_inet.models.IPAddressField(
+ blank=True, max_length=39, null=True
+ ),
+ ),
+ (
+ "ipaddr6",
+ django_inet.models.IPAddressField(
+ blank=True, max_length=39, null=True
+ ),
+ ),
+ ("is_rs_peer", models.BooleanField(default=False)),
+ ("notes", models.CharField(blank=True, max_length=255)),
+ ("speed", models.PositiveIntegerField()),
+ (
+ "ixlan",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="netixlan_set",
+ to="peeringdb_server.IXLan",
+ ),
+ ),
+ (
+ "network",
+ models.ForeignKey(
+ default=0,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="netixlan_set",
+ to="peeringdb_server.Network",
+ ),
+ ),
],
+ options={"db_table": "peeringdb_network_ixlan",},
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='Organization',
+ name="Organization",
fields=[
- ('id', models.AutoField(primary_key=True, serialize=False)),
- ('status',
- models.CharField(blank=True, max_length=255,
- verbose_name='Status')),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('updated',
- django_handleref.models.UpdatedDateTimeField(
- auto_now=True, verbose_name='Updated')),
- ('version', models.IntegerField(default=0)),
- ('address1', models.CharField(blank=True, max_length=255)),
- ('address2', models.CharField(blank=True, max_length=255)),
- ('city', models.CharField(blank=True, max_length=255)),
- ('state', models.CharField(blank=True, max_length=255)),
- ('zipcode', models.CharField(blank=True, max_length=48)),
- ('country',
- django_countries.fields.CountryField(blank=True,
- max_length=2)),
- ('name', models.CharField(max_length=255, unique=True)),
- ('website',
- django_peeringdb.models.abstract.URLField(
- blank=True, max_length=255)),
- ('notes', models.TextField(blank=True)),
- ('logo',
- models.FileField(
- blank=True, help_text=
- b'Allows you to upload and set a logo image file for this organization',
- null=True, upload_to=b'logos/')),
+ ("id", models.AutoField(primary_key=True, serialize=False)),
+ (
+ "status",
+ models.CharField(blank=True, max_length=255, verbose_name="Status"),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "updated",
+ django_handleref.models.UpdatedDateTimeField(
+ auto_now=True, verbose_name="Updated"
+ ),
+ ),
+ ("version", models.IntegerField(default=0)),
+ ("address1", models.CharField(blank=True, max_length=255)),
+ ("address2", models.CharField(blank=True, max_length=255)),
+ ("city", models.CharField(blank=True, max_length=255)),
+ ("state", models.CharField(blank=True, max_length=255)),
+ ("zipcode", models.CharField(blank=True, max_length=48)),
+ (
+ "country",
+ django_countries.fields.CountryField(blank=True, max_length=2),
+ ),
+ ("name", models.CharField(max_length=255, unique=True)),
+ (
+ "website",
+ django_peeringdb.models.abstract.URLField(
+ blank=True, max_length=255
+ ),
+ ),
+ ("notes", models.TextField(blank=True)),
+ (
+ "logo",
+ models.FileField(
+ blank=True,
+ help_text=b"Allows you to upload and set a logo image file for this organization",
+ null=True,
+ upload_to=b"logos/",
+ ),
+ ),
],
options={
- 'abstract': False,
- 'db_table': 'peeringdb_organization',
- 'verbose_name_plural': 'Organizations',
+ "abstract": False,
+ "db_table": "peeringdb_organization",
+ "verbose_name_plural": "Organizations",
},
- managers=[
- ('handleref', django.db.models.manager.Manager()),
- ],
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='OrganizationMerge',
+ name="OrganizationMerge",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('created',
- models.DateTimeField(auto_now_add=True,
- verbose_name='Merged on')),
- ('from_org',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='merged_to',
- to='peeringdb_server.Organization')),
- ('to_org',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='merged_from',
- to='peeringdb_server.Organization')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "created",
+ models.DateTimeField(auto_now_add=True, verbose_name="Merged on"),
+ ),
+ (
+ "from_org",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="merged_to",
+ to="peeringdb_server.Organization",
+ ),
+ ),
+ (
+ "to_org",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="merged_from",
+ to="peeringdb_server.Organization",
+ ),
+ ),
],
options={
- 'db_table': 'peeringdb_organization_merge',
- 'verbose_name': 'Organization Merge',
- 'verbose_name_plural': 'Organization Merges',
+ "db_table": "peeringdb_organization_merge",
+ "verbose_name": "Organization Merge",
+ "verbose_name_plural": "Organization Merges",
},
),
migrations.CreateModel(
- name='OrganizationMergeEntity',
+ name="OrganizationMergeEntity",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('object_id', models.PositiveIntegerField()),
- ('note', models.CharField(blank=True, max_length=32,
- null=True)),
- ('content_type',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- to='contenttypes.ContentType')),
- ('merge',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='entities',
- to='peeringdb_server.OrganizationMerge')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("object_id", models.PositiveIntegerField()),
+ ("note", models.CharField(blank=True, max_length=32, null=True)),
+ (
+ "content_type",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="contenttypes.ContentType",
+ ),
+ ),
+ (
+ "merge",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="entities",
+ to="peeringdb_server.OrganizationMerge",
+ ),
+ ),
],
options={
- 'db_table': 'peeringdb_organization_merge_entity',
- 'verbose_name': 'Organization Merge: Entity',
- 'verbose_name_plural': 'Organization Merge: Entities',
+ "db_table": "peeringdb_organization_merge_entity",
+ "verbose_name": "Organization Merge: Entity",
+ "verbose_name_plural": "Organization Merge: Entities",
},
),
migrations.CreateModel(
- name='Sponsorship',
+ name="Sponsorship",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('start_date',
- models.DateTimeField(
- default=peeringdb_server.models.default_time_s,
- verbose_name='Sponsorship starts on')),
- ('end_date',
- models.DateTimeField(
- default=peeringdb_server.models.default_time_e,
- verbose_name='Sponsorship ends on')),
- ('notify_date',
- models.DateTimeField(
- blank=True, null=True,
- verbose_name='Expiration notification sent on')),
- ('level',
- models.PositiveIntegerField(
- choices=[(1, 'Silver'), (2, 'Gold'), (3, 'Platinum'),
- (4, 'Diamond')], default=1)),
- ('url',
- models.URLField(
- blank=True,
- help_text=
- 'If specified clicking the sponsorship will take the user to this location',
- null=True, verbose_name='URL')),
- ('logo',
- models.FileField(
- blank=True, help_text=
- b'Allows you to upload and set a logo image file for this sponsorship',
- null=True, upload_to=b'logos/')),
- ('org',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='sponsorships',
- to='peeringdb_server.Organization')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "start_date",
+ models.DateTimeField(
+ default=peeringdb_server.models.default_time_s,
+ verbose_name="Sponsorship starts on",
+ ),
+ ),
+ (
+ "end_date",
+ models.DateTimeField(
+ default=peeringdb_server.models.default_time_e,
+ verbose_name="Sponsorship ends on",
+ ),
+ ),
+ (
+ "notify_date",
+ models.DateTimeField(
+ blank=True,
+ null=True,
+ verbose_name="Expiration notification sent on",
+ ),
+ ),
+ (
+ "level",
+ models.PositiveIntegerField(
+ choices=[
+ (1, "Silver"),
+ (2, "Gold"),
+ (3, "Platinum"),
+ (4, "Diamond"),
+ ],
+ default=1,
+ ),
+ ),
+ (
+ "url",
+ models.URLField(
+ blank=True,
+ help_text="If specified clicking the sponsorship will take the user to this location",
+ null=True,
+ verbose_name="URL",
+ ),
+ ),
+ (
+ "logo",
+ models.FileField(
+ blank=True,
+ help_text=b"Allows you to upload and set a logo image file for this sponsorship",
+ null=True,
+ upload_to=b"logos/",
+ ),
+ ),
+ (
+ "org",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="sponsorships",
+ to="peeringdb_server.Organization",
+ ),
+ ),
],
options={
- 'db_table': 'peeringdb_sponsorship',
- 'verbose_name': 'Sponsorship',
- 'verbose_name_plural': 'Sponsorships',
+ "db_table": "peeringdb_sponsorship",
+ "verbose_name": "Sponsorship",
+ "verbose_name_plural": "Sponsorships",
},
),
migrations.CreateModel(
- name='UserOrgAffiliationRequest',
+ name="UserOrgAffiliationRequest",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('org_name',
- models.CharField(
- blank=True,
- help_text=b'The organization name entered by the user',
- max_length=255, null=True)),
- ('asn',
- django_inet.models.ASNField(
- blank=True, help_text=b'The ASN entered by the user',
- null=True)),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('status',
- models.CharField(
- choices=[(b'pending', b'Pending'), (b'approved',
- b'Approved'),
- (b'denied', b'Denied')],
- help_text=b'Status of this request', max_length=254)),
- ('org',
- models.ForeignKey(
- blank=True, help_text=
- b'This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found.',
- null=True, on_delete=django.db.models.deletion.CASCADE,
- related_name='affiliation_requests',
- to='peeringdb_server.Organization')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "org_name",
+ models.CharField(
+ blank=True,
+ help_text=b"The organization name entered by the user",
+ max_length=255,
+ null=True,
+ ),
+ ),
+ (
+ "asn",
+ django_inet.models.ASNField(
+ blank=True, help_text=b"The ASN entered by the user", null=True
+ ),
+ ),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ (
+ "status",
+ models.CharField(
+ choices=[
+ (b"pending", b"Pending"),
+ (b"approved", b"Approved"),
+ (b"denied", b"Denied"),
+ ],
+ help_text=b"Status of this request",
+ max_length=254,
+ ),
+ ),
+ (
+ "org",
+ models.ForeignKey(
+ blank=True,
+ help_text=b"This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found.",
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="affiliation_requests",
+ to="peeringdb_server.Organization",
+ ),
+ ),
],
- options={
- 'db_table': 'peeringdb_user_org_affil_request',
- },
+ options={"db_table": "peeringdb_user_org_affil_request",},
),
migrations.CreateModel(
- name='VerificationQueueItem',
+ name="VerificationQueueItem",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('object_id', models.PositiveIntegerField()),
- ('created',
- django_handleref.models.CreatedDateTimeField(
- auto_now_add=True, verbose_name='Created')),
- ('notified', models.BooleanField(default=False)),
- ('content_type',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- to='contenttypes.ContentType')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("object_id", models.PositiveIntegerField()),
+ (
+ "created",
+ django_handleref.models.CreatedDateTimeField(
+ auto_now_add=True, verbose_name="Created"
+ ),
+ ),
+ ("notified", models.BooleanField(default=False)),
+ (
+ "content_type",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to="contenttypes.ContentType",
+ ),
+ ),
],
- options={
- 'db_table': 'peeringdb_verification_queue',
- },
+ options={"db_table": "peeringdb_verification_queue",},
),
migrations.CreateModel(
- name='UserPasswordReset',
+ name="UserPasswordReset",
fields=[
- ('user',
- models.OneToOneField(
- on_delete=django.db.models.deletion.CASCADE,
- primary_key=True, related_name='password_reset',
- serialize=False, to=settings.AUTH_USER_MODEL)),
- ('token', models.CharField(max_length=255)),
- ('created', models.DateTimeField(auto_now_add=True)),
+ (
+ "user",
+ models.OneToOneField(
+ on_delete=django.db.models.deletion.CASCADE,
+ primary_key=True,
+ related_name="password_reset",
+ serialize=False,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ ("token", models.CharField(max_length=255)),
+ ("created", models.DateTimeField(auto_now_add=True)),
],
- options={
- 'db_table': 'peeringdb_user_password_reset',
- },
+ options={"db_table": "peeringdb_user_password_reset",},
),
migrations.AddField(
- model_name='verificationqueueitem',
- name='user',
+ model_name="verificationqueueitem",
+ name="user",
field=models.ForeignKey(
- blank=True, help_text=
- b'The item that this queue is attached to was created by this user',
- null=True, on_delete=django.db.models.deletion.CASCADE,
- related_name='vqitems', to=settings.AUTH_USER_MODEL),
+ blank=True,
+ help_text=b"The item that this queue is attached to was created by this user",
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="vqitems",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AddField(
- model_name='userorgaffiliationrequest',
- name='user',
+ model_name="userorgaffiliationrequest",
+ name="user",
field=models.ForeignKey(
- help_text=b'The user that made the request',
+ help_text=b"The user that made the request",
on_delete=django.db.models.deletion.CASCADE,
- related_name='affiliation_requests',
- to=settings.AUTH_USER_MODEL),
+ related_name="affiliation_requests",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AddField(
- model_name='network',
- name='org',
+ model_name="network",
+ name="org",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
- related_name='net_set', to='peeringdb_server.Organization'),
+ related_name="net_set",
+ to="peeringdb_server.Organization",
+ ),
),
migrations.AddField(
- model_name='internetexchange',
- name='org',
+ model_name="internetexchange",
+ name="org",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
- related_name='ix_set', to='peeringdb_server.Organization'),
+ related_name="ix_set",
+ to="peeringdb_server.Organization",
+ ),
),
migrations.AddField(
- model_name='facility',
- name='org',
+ model_name="facility",
+ name="org",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
- related_name='fac_set', to='peeringdb_server.Organization'),
+ related_name="fac_set",
+ to="peeringdb_server.Organization",
+ ),
),
migrations.AddField(
- model_name='user',
- name='groups',
+ model_name="user",
+ name="groups",
field=models.ManyToManyField(
- blank=True, help_text=
- 'The groups this user belongs to. A user will get all permissions granted to each of their groups.',
- related_name='user_set', related_query_name='user',
- to='auth.Group', verbose_name='groups'),
+ blank=True,
+ help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
+ related_name="user_set",
+ related_query_name="user",
+ to="auth.Group",
+ verbose_name="groups",
+ ),
),
migrations.AddField(
- model_name='user',
- name='user_permissions',
+ model_name="user",
+ name="user_permissions",
field=models.ManyToManyField(
- blank=True, help_text='Specific permissions for this user.',
- related_name='user_set', related_query_name='user',
- to='auth.Permission', verbose_name='user permissions'),
+ blank=True,
+ help_text="Specific permissions for this user.",
+ related_name="user_set",
+ related_query_name="user",
+ to="auth.Permission",
+ verbose_name="user permissions",
+ ),
),
migrations.CreateModel(
- name='DuplicateIPNetworkIXLan',
+ name="DuplicateIPNetworkIXLan",
fields=[],
options={
- 'verbose_name': 'Duplicate IP',
- 'proxy': True,
- 'verbose_name_plural': 'Duplicate IPs',
- 'indexes': [],
+ "verbose_name": "Duplicate IP",
+ "proxy": True,
+ "verbose_name_plural": "Duplicate IPs",
+ "indexes": [],
},
- bases=('peeringdb_server.networkixlan', ),
- managers=[
- ('handleref', django.db.models.manager.Manager()),
- ],
+ bases=("peeringdb_server.networkixlan",),
+ managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.CreateModel(
- name='UserPermission',
+ name="UserPermission",
fields=[],
options={
- 'verbose_name': 'User Permission',
- 'proxy': True,
- 'verbose_name_plural': 'User Permissions',
- 'indexes': [],
+ "verbose_name": "User Permission",
+ "proxy": True,
+ "verbose_name_plural": "User Permissions",
+ "indexes": [],
},
- bases=('peeringdb_server.user', ),
- managers=[
- ('objects', django.contrib.auth.models.UserManager()),
- ],
+ bases=("peeringdb_server.user",),
+ managers=[("objects", django.contrib.auth.models.UserManager()),],
),
migrations.AlterUniqueTogether(
- name='networkfacility',
- unique_together=set([('network', 'facility', 'local_asn')]),
+ name="networkfacility",
+ unique_together=set([("network", "facility", "local_asn")]),
),
migrations.AlterUniqueTogether(
- name='internetexchangefacility',
- unique_together=set([('ix', 'facility')]),
+ name="internetexchangefacility", unique_together=set([("ix", "facility")]),
),
]
diff --git a/peeringdb_server/migrations/0002_partnernship_model.py b/peeringdb_server/migrations/0002_partnernship_model.py
index e3b5b367..4e4c9ec7 100644
--- a/peeringdb_server/migrations/0002_partnernship_model.py
+++ b/peeringdb_server/migrations/0002_partnernship_model.py
@@ -9,39 +9,60 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0001_initial'),
+ ("peeringdb_server", "0001_initial"),
]
operations = [
migrations.CreateModel(
- name='Partnership',
+ name="Partnership",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('level',
- models.PositiveIntegerField(
- choices=[(1, 'Data Validation Partner'),
- (2, 'RIR Partner')], default=1)),
- ('url',
- models.URLField(
- blank=True, help_text=
- 'If specified clicking the partnership will take the user to this location',
- null=True, verbose_name='URL')),
- ('logo',
- models.FileField(
- blank=True, help_text=
- b'Allows you to upload and set a logo image file for this partnership',
- null=True, upload_to=b'logos/')),
- ('org',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='partnerships',
- to='peeringdb_server.Organization')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "level",
+ models.PositiveIntegerField(
+ choices=[(1, "Data Validation Partner"), (2, "RIR Partner")],
+ default=1,
+ ),
+ ),
+ (
+ "url",
+ models.URLField(
+ blank=True,
+ help_text="If specified clicking the partnership will take the user to this location",
+ null=True,
+ verbose_name="URL",
+ ),
+ ),
+ (
+ "logo",
+ models.FileField(
+ blank=True,
+ help_text=b"Allows you to upload and set a logo image file for this partnership",
+ null=True,
+ upload_to=b"logos/",
+ ),
+ ),
+ (
+ "org",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="partnerships",
+ to="peeringdb_server.Organization",
+ ),
+ ),
],
options={
- 'db_table': 'peeringdb_partnership',
- 'verbose_name': 'Partnership',
- 'verbose_name_plural': 'Partnerships',
+ "db_table": "peeringdb_partnership",
+ "verbose_name": "Partnership",
+ "verbose_name_plural": "Partnerships",
},
),
]
diff --git a/peeringdb_server/migrations/0003_add_lat_lon_to_address_models.py b/peeringdb_server/migrations/0003_add_lat_lon_to_address_models.py
index 9ac51c7d..c9ac9dba 100644
--- a/peeringdb_server/migrations/0003_add_lat_lon_to_address_models.py
+++ b/peeringdb_server/migrations/0003_add_lat_lon_to_address_models.py
@@ -8,42 +8,59 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0002_partnernship_model'),
+ ("peeringdb_server", "0002_partnernship_model"),
]
operations = [
migrations.AddField(
- model_name='facility',
- name='lat',
- field=models.DecimalField(blank=True, decimal_places=6,
- help_text=b'Latitude', max_digits=9,
- null=True),
+ model_name="facility",
+ name="lat",
+ field=models.DecimalField(
+ blank=True,
+ decimal_places=6,
+ help_text=b"Latitude",
+ max_digits=9,
+ null=True,
+ ),
),
migrations.AddField(
- model_name='facility',
- name='lon',
- field=models.DecimalField(blank=True, decimal_places=6,
- help_text=b'Longitude', max_digits=9,
- null=True),
+ model_name="facility",
+ name="lon",
+ field=models.DecimalField(
+ blank=True,
+ decimal_places=6,
+ help_text=b"Longitude",
+ max_digits=9,
+ null=True,
+ ),
),
migrations.AddField(
- model_name='organization',
- name='lat',
- field=models.DecimalField(blank=True, decimal_places=6,
- help_text=b'Latitude', max_digits=9,
- null=True),
+ model_name="organization",
+ name="lat",
+ field=models.DecimalField(
+ blank=True,
+ decimal_places=6,
+ help_text=b"Latitude",
+ max_digits=9,
+ null=True,
+ ),
),
migrations.AddField(
- model_name='organization',
- name='lon',
- field=models.DecimalField(blank=True, decimal_places=6,
- help_text=b'Longitude', max_digits=9,
- null=True),
+ model_name="organization",
+ name="lon",
+ field=models.DecimalField(
+ blank=True,
+ decimal_places=6,
+ help_text=b"Longitude",
+ max_digits=9,
+ null=True,
+ ),
),
migrations.AlterField(
- model_name='partnership',
- name='level',
- field=models.PositiveIntegerField(choices=[(1, 'Data Validation'),
- (2, 'RIR')], default=1),
+ model_name="partnership",
+ name="level",
+ field=models.PositiveIntegerField(
+ choices=[(1, "Data Validation"), (2, "RIR")], default=1
+ ),
),
]
diff --git a/peeringdb_server/migrations/0004_geocode_fields.py b/peeringdb_server/migrations/0004_geocode_fields.py
index 975b55a2..38139767 100644
--- a/peeringdb_server/migrations/0004_geocode_fields.py
+++ b/peeringdb_server/migrations/0004_geocode_fields.py
@@ -8,31 +8,32 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0003_add_lat_lon_to_address_models'),
+ ("peeringdb_server", "0003_add_lat_lon_to_address_models"),
]
operations = [
migrations.AddField(
- model_name='facility',
- name='geocode_date',
+ model_name="facility",
+ name="geocode_date",
field=models.DateTimeField(
- blank=True, help_text=b'Last time of attempted geocode',
- null=True),
+ blank=True, help_text=b"Last time of attempted geocode", null=True
+ ),
),
migrations.AddField(
- model_name='facility',
- name='geocode_error',
+ model_name="facility",
+ name="geocode_error",
field=models.TextField(
blank=True,
- help_text=b'Error message of previous geocode attempt',
- null=True),
+ help_text=b"Error message of previous geocode attempt",
+ null=True,
+ ),
),
migrations.AddField(
- model_name='facility',
- name='geocode_status',
+ model_name="facility",
+ name="geocode_status",
field=models.BooleanField(
- default=False, help_text=
- b"Has this object's latitude and longitude been syncronized to it's address fields"
+ default=False,
+ help_text=b"Has this object's latitude and longitude been syncronized to it's address fields",
),
),
]
diff --git a/peeringdb_server/migrations/0005_lat_lon_field_rename.py b/peeringdb_server/migrations/0005_lat_lon_field_rename.py
index 74b96234..1f8dbf93 100644
--- a/peeringdb_server/migrations/0005_lat_lon_field_rename.py
+++ b/peeringdb_server/migrations/0005_lat_lon_field_rename.py
@@ -8,28 +8,20 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0004_geocode_fields'),
+ ("peeringdb_server", "0004_geocode_fields"),
]
operations = [
migrations.RenameField(
- model_name='facility',
- old_name='lat',
- new_name='latitude',
+ model_name="facility", old_name="lat", new_name="latitude",
),
migrations.RenameField(
- model_name='facility',
- old_name='lon',
- new_name='longitude',
+ model_name="facility", old_name="lon", new_name="longitude",
),
migrations.RenameField(
- model_name='organization',
- old_name='lat',
- new_name='latitude',
+ model_name="organization", old_name="lat", new_name="latitude",
),
migrations.RenameField(
- model_name='organization',
- old_name='lon',
- new_name='longitude',
+ model_name="organization", old_name="lon", new_name="longitude",
),
]
diff --git a/peeringdb_server/migrations/0006_network_allow_ixp_update.py b/peeringdb_server/migrations/0006_network_allow_ixp_update.py
index 9878b1af..298ce8f6 100644
--- a/peeringdb_server/migrations/0006_network_allow_ixp_update.py
+++ b/peeringdb_server/migrations/0006_network_allow_ixp_update.py
@@ -8,16 +8,16 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0005_lat_lon_field_rename'),
+ ("peeringdb_server", "0005_lat_lon_field_rename"),
]
operations = [
migrations.AddField(
- model_name='network',
- name='allow_ixp_update',
+ model_name="network",
+ name="allow_ixp_update",
field=models.BooleanField(
- default=False, help_text=
- b'Sepcifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data'
+ default=False,
+ help_text=b"Sepcifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data",
),
),
]
diff --git a/peeringdb_server/migrations/0007_ixlan_json_member_list_url.py b/peeringdb_server/migrations/0007_ixlan_json_member_list_url.py
index c2e6e348..c67c92b8 100644
--- a/peeringdb_server/migrations/0007_ixlan_json_member_list_url.py
+++ b/peeringdb_server/migrations/0007_ixlan_json_member_list_url.py
@@ -8,13 +8,13 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0006_network_allow_ixp_update'),
+ ("peeringdb_server", "0006_network_allow_ixp_update"),
]
operations = [
migrations.AddField(
- model_name='ixlan',
- name='json_member_list_url',
+ model_name="ixlan",
+ name="json_member_list_url",
field=models.URLField(blank=True, null=True),
),
]
diff --git a/peeringdb_server/migrations/0008_ixf_import_log.py b/peeringdb_server/migrations/0008_ixf_import_log.py
index d38374dc..6098d7f9 100644
--- a/peeringdb_server/migrations/0008_ixf_import_log.py
+++ b/peeringdb_server/migrations/0008_ixf_import_log.py
@@ -9,61 +9,97 @@
class Migration(migrations.Migration):
dependencies = [
- ('reversion', '0001_squashed_0004_auto_20160611_1202'),
- ('peeringdb_server', '0007_ixlan_json_member_list_url'),
+ ("reversion", "0001_squashed_0004_auto_20160611_1202"),
+ ("peeringdb_server", "0007_ixlan_json_member_list_url"),
]
operations = [
migrations.CreateModel(
- name='IXLanIXFMemberImportAttempt',
+ name="IXLanIXFMemberImportAttempt",
fields=[
- ('ixlan',
- models.OneToOneField(
- on_delete=django.db.models.deletion.CASCADE,
- primary_key=True, related_name='ixf_import_attempt',
- serialize=False, to='peeringdb_server.IXLan')),
- ('updated', models.DateTimeField(auto_now=True)),
- ('info', models.TextField(blank=True, null=True)),
+ (
+ "ixlan",
+ models.OneToOneField(
+ on_delete=django.db.models.deletion.CASCADE,
+ primary_key=True,
+ related_name="ixf_import_attempt",
+ serialize=False,
+ to="peeringdb_server.IXLan",
+ ),
+ ),
+ ("updated", models.DateTimeField(auto_now=True)),
+ ("info", models.TextField(blank=True, null=True)),
],
),
migrations.CreateModel(
- name='IXLanIXFMemberImportLog',
+ name="IXLanIXFMemberImportLog",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('created', models.DateTimeField(auto_now_add=True)),
- ('updated', models.DateTimeField(auto_now=True)),
- ('ixlan',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='ixf_import_log_set',
- to='peeringdb_server.IXLan')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("created", models.DateTimeField(auto_now_add=True)),
+ ("updated", models.DateTimeField(auto_now=True)),
+ (
+ "ixlan",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixf_import_log_set",
+ to="peeringdb_server.IXLan",
+ ),
+ ),
],
),
migrations.CreateModel(
- name='IXLanIXFMemberImportLogEntry',
+ name="IXLanIXFMemberImportLogEntry",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('log',
- models.ForeignKey(
- on_delete=django.db.models.deletion.CASCADE,
- related_name='entries',
- to='peeringdb_server.IXLanIXFMemberImportLog')),
- ('netixlan',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='ixf_import_log_entries',
- to='peeringdb_server.NetworkIXLan')),
- ('version_after',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- related_name='ixf_import_log_after',
- to='reversion.Version')),
- ('version_before',
- models.ForeignKey(null=True,
- on_delete=django.db.models.deletion.CASCADE,
- related_name='ixf_import_log_before',
- to='reversion.Version')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "log",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="entries",
+ to="peeringdb_server.IXLanIXFMemberImportLog",
+ ),
+ ),
+ (
+ "netixlan",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixf_import_log_entries",
+ to="peeringdb_server.NetworkIXLan",
+ ),
+ ),
+ (
+ "version_after",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixf_import_log_after",
+ to="reversion.Version",
+ ),
+ ),
+ (
+ "version_before",
+ models.ForeignKey(
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ixf_import_log_before",
+ to="reversion.Version",
+ ),
+ ),
],
),
]
diff --git a/peeringdb_server/migrations/0009_rename_json_member_list_field.py b/peeringdb_server/migrations/0009_rename_json_member_list_field.py
index 2aaeebf6..afb13e80 100644
--- a/peeringdb_server/migrations/0009_rename_json_member_list_field.py
+++ b/peeringdb_server/migrations/0009_rename_json_member_list_field.py
@@ -8,27 +8,27 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0008_ixf_import_log'),
+ ("peeringdb_server", "0008_ixf_import_log"),
]
operations = [
migrations.AlterModelOptions(
- name='ixlanixfmemberimportlog',
+ name="ixlanixfmemberimportlog",
options={
- 'verbose_name': 'IXF Import Log',
- 'verbose_name_plural': 'IXF Import Logs'
+ "verbose_name": "IXF Import Log",
+ "verbose_name_plural": "IXF Import Logs",
},
),
migrations.AlterModelOptions(
- name='ixlanixfmemberimportlogentry',
+ name="ixlanixfmemberimportlogentry",
options={
- 'verbose_name': 'IXF Import Log Entry',
- 'verbose_name_plural': 'IXF Import Log Entries'
+ "verbose_name": "IXF Import Log Entry",
+ "verbose_name_plural": "IXF Import Log Entries",
},
),
migrations.RenameField(
- model_name='ixlan',
- old_name='json_member_list_url',
- new_name='ixf_member_export_url',
+ model_name="ixlan",
+ old_name="json_member_list_url",
+ new_name="ixf_member_export_url",
),
]
diff --git a/peeringdb_server/migrations/0010_rename_ixf_ixp_member_list_url.py b/peeringdb_server/migrations/0010_rename_ixf_ixp_member_list_url.py
index 0eb8704b..8eece8ab 100644
--- a/peeringdb_server/migrations/0010_rename_ixf_ixp_member_list_url.py
+++ b/peeringdb_server/migrations/0010_rename_ixf_ixp_member_list_url.py
@@ -8,13 +8,13 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0009_rename_json_member_list_field'),
+ ("peeringdb_server", "0009_rename_json_member_list_field"),
]
operations = [
migrations.RenameField(
- model_name='ixlan',
- old_name='ixf_member_export_url',
- new_name='ixf_ixp_member_list_url',
+ model_name="ixlan",
+ old_name="ixf_member_export_url",
+ new_name="ixf_ixp_member_list_url",
),
]
diff --git a/peeringdb_server/migrations/0011_commandline_tool.py b/peeringdb_server/migrations/0011_commandline_tool.py
index 905ebaa3..8214ffc6 100644
--- a/peeringdb_server/migrations/0011_commandline_tool.py
+++ b/peeringdb_server/migrations/0011_commandline_tool.py
@@ -10,36 +10,56 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0010_rename_ixf_ixp_member_list_url'),
+ ("peeringdb_server", "0010_rename_ixf_ixp_member_list_url"),
]
operations = [
migrations.CreateModel(
- name='CommandLineTool',
+ name="CommandLineTool",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('tool',
- models.CharField(
- choices=[(b'pdb_renumber_lans', 'Renumber IP Space')],
- help_text='name of the tool', max_length=255)),
- ('arguments',
- models.TextField(
- help_text=
- 'json serialization of arguments and options passed')),
- ('result',
- models.TextField(blank=True, help_text='result log',
- null=True)),
- ('created',
- models.DateTimeField(
- auto_now_add=True,
- help_text=b'command was run at this date and time')),
- ('user',
- models.ForeignKey(help_text=b'the user that ran this command',
- on_delete=django.db.models.deletion.CASCADE,
- related_name='clt_history',
- to=settings.AUTH_USER_MODEL)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "tool",
+ models.CharField(
+ choices=[(b"pdb_renumber_lans", "Renumber IP Space")],
+ help_text="name of the tool",
+ max_length=255,
+ ),
+ ),
+ (
+ "arguments",
+ models.TextField(
+ help_text="json serialization of arguments and options passed"
+ ),
+ ),
+ (
+ "result",
+ models.TextField(blank=True, help_text="result log", null=True),
+ ),
+ (
+ "created",
+ models.DateTimeField(
+ auto_now_add=True,
+ help_text=b"command was run at this date and time",
+ ),
+ ),
+ (
+ "user",
+ models.ForeignKey(
+ help_text=b"the user that ran this command",
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="clt_history",
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
],
),
]
diff --git a/peeringdb_server/migrations/0012_deskpro.py b/peeringdb_server/migrations/0012_deskpro.py
index 4cb2303e..f4b36cad 100644
--- a/peeringdb_server/migrations/0012_deskpro.py
+++ b/peeringdb_server/migrations/0012_deskpro.py
@@ -10,30 +10,40 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0011_commandline_tool'),
+ ("peeringdb_server", "0011_commandline_tool"),
]
operations = [
migrations.CreateModel(
- name='DeskProTicket',
+ name="DeskProTicket",
fields=[
- ('id',
- models.AutoField(auto_created=True, primary_key=True,
- serialize=False, verbose_name='ID')),
- ('subject', models.CharField(max_length=255)),
- ('body', models.TextField()),
- ('created', models.DateTimeField(auto_now_add=True)),
- ('published', models.DateTimeField(null=True)),
- ('user',
- models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
- to=settings.AUTH_USER_MODEL)),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("subject", models.CharField(max_length=255)),
+ ("body", models.TextField()),
+ ("created", models.DateTimeField(auto_now_add=True)),
+ ("published", models.DateTimeField(null=True)),
+ (
+ "user",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
],
),
migrations.AlterModelOptions(
- name='userorgaffiliationrequest',
+ name="userorgaffiliationrequest",
options={
- 'verbose_name': 'User to Organization Affiliation Request',
- 'verbose_name_plural': 'User to Organization Affiliation Requests'
+ "verbose_name": "User to Organization Affiliation Request",
+ "verbose_name_plural": "User to Organization Affiliation Requests",
},
),
]
diff --git a/peeringdb_server/migrations/0013_user_locale.py b/peeringdb_server/migrations/0013_user_locale.py
index 4d8b34cd..a213df11 100644
--- a/peeringdb_server/migrations/0013_user_locale.py
+++ b/peeringdb_server/migrations/0013_user_locale.py
@@ -8,14 +8,15 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0012_deskpro'),
+ ("peeringdb_server", "0012_deskpro"),
]
operations = [
migrations.AddField(
- model_name='user',
- name='locale',
- field=models.CharField(blank=True, max_length=62, null=True,
- verbose_name='language'),
+ model_name="user",
+ name="locale",
+ field=models.CharField(
+ blank=True, max_length=62, null=True, verbose_name="language"
+ ),
),
]
diff --git a/peeringdb_server/migrations/0014_clt_description.py b/peeringdb_server/migrations/0014_clt_description.py
index 75cbed69..ebd16ac5 100644
--- a/peeringdb_server/migrations/0014_clt_description.py
+++ b/peeringdb_server/migrations/0014_clt_description.py
@@ -8,25 +8,31 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0013_user_locale'),
+ ("peeringdb_server", "0013_user_locale"),
]
operations = [
migrations.AddField(
- model_name='commandlinetool',
- name='description',
+ model_name="commandlinetool",
+ name="description",
field=models.CharField(
blank=True,
- help_text='Descriptive text of command that can be searched',
- max_length=255, null=True),
+ help_text="Descriptive text of command that can be searched",
+ max_length=255,
+ null=True,
+ ),
),
migrations.AlterField(
- model_name='commandlinetool',
- name='tool',
+ model_name="commandlinetool",
+ name="tool",
field=models.CharField(
- choices=[(b'pdb_renumber_lans', 'Renumber IP Space'),
- (b'pdb_fac_merge', 'Merge Facilities'),
- (b'pdb_fac_merge_undo', 'Merge Facilities: UNDO')],
- help_text='name of the tool', max_length=255),
+ choices=[
+ (b"pdb_renumber_lans", "Renumber IP Space"),
+ (b"pdb_fac_merge", "Merge Facilities"),
+ (b"pdb_fac_merge_undo", "Merge Facilities: UNDO"),
+ ],
+ help_text="name of the tool",
+ max_length=255,
+ ),
),
]
diff --git a/peeringdb_server/migrations/0015_email_address.py b/peeringdb_server/migrations/0015_email_address.py
index 3fb5de60..342e482c 100644
--- a/peeringdb_server/migrations/0015_email_address.py
+++ b/peeringdb_server/migrations/0015_email_address.py
@@ -6,27 +6,24 @@
def create_email_instances(apps, schema_editor):
users = apps.get_model("peeringdb_server", "User")
- emailAddresses = apps.get_model('account', "EmailAddress")
+ emailAddresses = apps.get_model("account", "EmailAddress")
all_emails = emailAddresses.objects.all()
emails = []
emails_dict = {}
for user in users.objects.all():
l_email = user.email.lower()
- if not all_emails.filter(
- email=l_email).exists() and l_email not in emails_dict:
+ if not all_emails.filter(email=l_email).exists() and l_email not in emails_dict:
emails_dict[l_email] = 1
- emails.append(
- emailAddresses(email=l_email, user=user, primary=True))
+ emails.append(emailAddresses(email=l_email, user=user, primary=True))
emailAddresses.objects.bulk_create(emails)
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0014_clt_description'),
+ ("peeringdb_server", "0014_clt_description"),
]
operations = [
- migrations.RunPython(create_email_instances,
- migrations.RunPython.noop),
+ migrations.RunPython(create_email_instances, migrations.RunPython.noop),
]
diff --git a/peeringdb_server/migrations/0016_auto_20190110_2321.py b/peeringdb_server/migrations/0016_auto_20190110_2321.py
index bca2d89d..cabebccc 100644
--- a/peeringdb_server/migrations/0016_auto_20190110_2321.py
+++ b/peeringdb_server/migrations/0016_auto_20190110_2321.py
@@ -8,18 +8,35 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0015_email_address'),
+ ("peeringdb_server", "0015_email_address"),
]
operations = [
migrations.AddField(
- model_name='commandlinetool',
- name='status',
- field=models.CharField(choices=[(b'done', 'Done'), (b'waiting', 'Waiting'), (b'running', 'Running')], default=b'done', max_length=255),
+ model_name="commandlinetool",
+ name="status",
+ field=models.CharField(
+ choices=[
+ (b"done", "Done"),
+ (b"waiting", "Waiting"),
+ (b"running", "Running"),
+ ],
+ default=b"done",
+ max_length=255,
+ ),
),
migrations.AlterField(
- model_name='commandlinetool',
- name='tool',
- field=models.CharField(choices=[(b'pdb_renumber_lans', 'Renumber IP Space'), (b'pdb_fac_merge', 'Merge Facilities'), (b'pdb_fac_merge_undo', 'Merge Facilities: UNDO'), (b'pdb_wipe', b'Reset')], help_text='name of the tool', max_length=255),
+ model_name="commandlinetool",
+ name="tool",
+ field=models.CharField(
+ choices=[
+ (b"pdb_renumber_lans", "Renumber IP Space"),
+ (b"pdb_fac_merge", "Merge Facilities"),
+ (b"pdb_fac_merge_undo", "Merge Facilities: UNDO"),
+ (b"pdb_wipe", b"Reset"),
+ ],
+ help_text="name of the tool",
+ max_length=255,
+ ),
),
]
diff --git a/peeringdb_server/migrations/0017_ixf_ixp_import_enabled.py b/peeringdb_server/migrations/0017_ixf_ixp_import_enabled.py
index 6dc21a47..b5171892 100644
--- a/peeringdb_server/migrations/0017_ixf_ixp_import_enabled.py
+++ b/peeringdb_server/migrations/0017_ixf_ixp_import_enabled.py
@@ -8,13 +8,13 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0016_auto_20190110_2321'),
+ ("peeringdb_server", "0016_auto_20190110_2321"),
]
operations = [
migrations.AddField(
- model_name='ixlan',
- name='ixf_ixp_import_enabled',
+ model_name="ixlan",
+ name="ixf_ixp_import_enabled",
field=models.BooleanField(default=False),
),
]
diff --git a/peeringdb_server/migrations/0018_set_ixf_ixp_import_enabled.py b/peeringdb_server/migrations/0018_set_ixf_ixp_import_enabled.py
index 067f4bec..fe187b82 100644
--- a/peeringdb_server/migrations/0018_set_ixf_ixp_import_enabled.py
+++ b/peeringdb_server/migrations/0018_set_ixf_ixp_import_enabled.py
@@ -4,6 +4,7 @@
from django.db import migrations, models
+
def forwards_func(apps, schema_editor):
model = apps.get_model("peeringdb_server", "IXLan")
for ixlan in model.objects.all():
@@ -11,16 +12,14 @@ def forwards_func(apps, schema_editor):
ixlan.ixf_ixp_import_enabled = True
ixlan.save()
+
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0017_ixf_ixp_import_enabled'),
+ ("peeringdb_server", "0017_ixf_ixp_import_enabled"),
]
operations = [
- migrations.AlterModelManagers(
- name='ixlan',
- managers=[],
- ),
+ migrations.AlterModelManagers(name="ixlan", managers=[],),
migrations.RunPython(forwards_func, migrations.RunPython.noop),
]
diff --git a/peeringdb_server/migrations/0019_auto_20190819_1133.py b/peeringdb_server/migrations/0019_auto_20190819_1133.py
index a0af895a..f7d905b5 100644
--- a/peeringdb_server/migrations/0019_auto_20190819_1133.py
+++ b/peeringdb_server/migrations/0019_auto_20190819_1133.py
@@ -9,29 +9,36 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0018_set_ixf_ixp_import_enabled'),
+ ("peeringdb_server", "0018_set_ixf_ixp_import_enabled"),
]
operations = [
migrations.AlterModelManagers(
- name='ixlan',
- managers=[
- ('handleref', django.db.models.manager.Manager()),
- ],
+ name="ixlan", managers=[("handleref", django.db.models.manager.Manager()),],
),
migrations.AddField(
- model_name='ixlanixfmemberimportlogentry',
- name='action',
+ model_name="ixlanixfmemberimportlogentry",
+ name="action",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
- model_name='ixlanixfmemberimportlogentry',
- name='reason',
+ model_name="ixlanixfmemberimportlogentry",
+ name="reason",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
- model_name='commandlinetool',
- name='tool',
- field=models.CharField(choices=[(b'pdb_renumber_lans', 'Renumber IP Space'), (b'pdb_fac_merge', 'Merge Facilities'), (b'pdb_fac_merge_undo', 'Merge Facilities: UNDO'), (b'pdb_undelete', 'Restore Object(s)'), (b'pdb_ixf_ixp_member_import', 'IX-F Import')], help_text='name of the tool', max_length=255),
+ model_name="commandlinetool",
+ name="tool",
+ field=models.CharField(
+ choices=[
+ (b"pdb_renumber_lans", "Renumber IP Space"),
+ (b"pdb_fac_merge", "Merge Facilities"),
+ (b"pdb_fac_merge_undo", "Merge Facilities: UNDO"),
+ (b"pdb_undelete", "Restore Object(s)"),
+ (b"pdb_ixf_ixp_member_import", "IX-F Import"),
+ ],
+ help_text="name of the tool",
+ max_length=255,
+ ),
),
]
diff --git a/peeringdb_server/migrations/0020_sponsorship_multi_org.py b/peeringdb_server/migrations/0020_sponsorship_multi_org.py
index 88404853..9f996ebc 100644
--- a/peeringdb_server/migrations/0020_sponsorship_multi_org.py
+++ b/peeringdb_server/migrations/0020_sponsorship_multi_org.py
@@ -5,43 +5,89 @@
from django.db import migrations, models
import django.db.models.deletion
+
def forwards_func(apps, schema_editor):
"""
move relation shit from sponsorship->org to org->sponsorship
to allow a many orgs to many sponsorship relation
"""
- Sponsorship = apps.get_model("peeringdb_server", "Sponsorship")
- SponsorshipOrganization = apps.get_model("peeringdb_server", "SponsorshipOrganization")
+ Sponsorship = apps.get_model("peeringdb_server", "Sponsorship")
+ SponsorshipOrganization = apps.get_model(
+ "peeringdb_server", "SponsorshipOrganization"
+ )
for sponsorship in Sponsorship.objects.all():
SponsorshipOrganization.objects.create(
org=sponsorship.org,
sponsorship=sponsorship,
url=sponsorship.url,
- logo=sponsorship.logo)
+ logo=sponsorship.logo,
+ )
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0020_vqueue_item_unique'),
+ ("peeringdb_server", "0020_vqueue_item_unique"),
]
operations = [
migrations.CreateModel(
- name='SponsorshipOrganization',
+ name="SponsorshipOrganization",
fields=[
- ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('url', models.URLField(blank=True, help_text='If specified clicking the sponsorship will take the user to this location', null=True, verbose_name='URL')),
- ('logo', models.FileField(blank=True, help_text='Allows you to upload and set a logo image file for this sponsorship', null=True, upload_to=b'logos/')),
- ('org', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sponsorshiporg_set', to='peeringdb_server.Organization')),
- ('sponsorship', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sponsorshiporg_set', to='peeringdb_server.Sponsorship')),
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "url",
+ models.URLField(
+ blank=True,
+ help_text="If specified clicking the sponsorship will take the user to this location",
+ null=True,
+ verbose_name="URL",
+ ),
+ ),
+ (
+ "logo",
+ models.FileField(
+ blank=True,
+ help_text="Allows you to upload and set a logo image file for this sponsorship",
+ null=True,
+ upload_to=b"logos/",
+ ),
+ ),
+ (
+ "org",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="sponsorshiporg_set",
+ to="peeringdb_server.Organization",
+ ),
+ ),
+ (
+ "sponsorship",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="sponsorshiporg_set",
+ to="peeringdb_server.Sponsorship",
+ ),
+ ),
],
),
migrations.AddField(
- model_name='sponsorship',
- name='orgs',
- field=models.ManyToManyField(related_name='sponsorship_set', through='peeringdb_server.SponsorshipOrganization', to='peeringdb_server.Organization'),
+ model_name="sponsorship",
+ name="orgs",
+ field=models.ManyToManyField(
+ related_name="sponsorship_set",
+ through="peeringdb_server.SponsorshipOrganization",
+ to="peeringdb_server.Organization",
+ ),
),
migrations.RunPython(forwards_func, migrations.RunPython.noop),
]
diff --git a/peeringdb_server/migrations/0020_vqueue_item_unique.py b/peeringdb_server/migrations/0020_vqueue_item_unique.py
index 9ccddeaf..d793495e 100644
--- a/peeringdb_server/migrations/0020_vqueue_item_unique.py
+++ b/peeringdb_server/migrations/0020_vqueue_item_unique.py
@@ -8,13 +8,13 @@
class Migration(migrations.Migration):
dependencies = [
- ('contenttypes', '0002_remove_content_type_name'),
- ('peeringdb_server', '0019_auto_20190819_1133'),
+ ("contenttypes", "0002_remove_content_type_name"),
+ ("peeringdb_server", "0019_auto_20190819_1133"),
]
operations = [
migrations.AlterUniqueTogether(
- name='verificationqueueitem',
- unique_together=set([('content_type', 'object_id')]),
+ name="verificationqueueitem",
+ unique_together=set([("content_type", "object_id")]),
),
]
diff --git a/peeringdb_server/migrations/0021_sponsorship_drop_single_org_relation_fields.py b/peeringdb_server/migrations/0021_sponsorship_drop_single_org_relation_fields.py
index 3035a855..93360f39 100644
--- a/peeringdb_server/migrations/0021_sponsorship_drop_single_org_relation_fields.py
+++ b/peeringdb_server/migrations/0021_sponsorship_drop_single_org_relation_fields.py
@@ -8,20 +8,11 @@
class Migration(migrations.Migration):
dependencies = [
- ('peeringdb_server', '0020_sponsorship_multi_org'),
+ ("peeringdb_server", "0020_sponsorship_multi_org"),
]
operations = [
- migrations.RemoveField(
- model_name='sponsorship',
- name='logo',
- ),
- migrations.RemoveField(
- model_name='sponsorship',
- name='org',
- ),
- migrations.RemoveField(
- model_name='sponsorship',
- name='url',
- ),
+ migrations.RemoveField(model_name="sponsorship", name="logo",),
+ migrations.RemoveField(model_name="sponsorship", name="org",),
+ migrations.RemoveField(model_name="sponsorship", name="url",),
]
diff --git a/peeringdb_server/mock.py b/peeringdb_server/mock.py
index 6243a4c3..f3f422b3 100644
--- a/peeringdb_server/mock.py
+++ b/peeringdb_server/mock.py
@@ -47,18 +47,20 @@ def get_hosts(network, count=100):
yield host
# Pool of IPv4 addresses (100 per prefix)
- self.ipaddr_pool_v4 = dict([(prefix,
- list(
- get_hosts(
- ipaddress.IPv4Network(prefix))))
- for prefix in self.prefix_pool_v4])
+ self.ipaddr_pool_v4 = dict(
+ [
+ (prefix, list(get_hosts(ipaddress.IPv4Network(prefix))))
+ for prefix in self.prefix_pool_v4
+ ]
+ )
# Pool of IPv6 addresses (100 per prefix)
- self.ipaddr_pool_v6 = dict([(prefix,
- list(
- get_hosts(
- ipaddress.IPv6Network(prefix))))
- for prefix in self.prefix_pool_v6])
+ self.ipaddr_pool_v6 = dict(
+ [
+ (prefix, list(get_hosts(ipaddress.IPv6Network(prefix))))
+ for prefix in self.prefix_pool_v6
+ ]
+ )
def create(self, reftag, **kwargs):
"""
@@ -82,8 +84,7 @@ def create(self, reftag, **kwargs):
continue
if field.is_relation and field.many_to_one:
if hasattr(field.related_model, "ref_tag"):
- data[field.name] = self.create(
- field.related_model.handleref.tag)
+ data[field.name] = self.create(field.related_model.handleref.tag)
# then we set the other fields to mock values provided by this class
for field in model._meta.get_fields():
@@ -105,8 +106,11 @@ def create(self, reftag, **kwargs):
#
# PDB choices often have Not Disclosed at index 0 and 1
# so we try index 2 first.
- if not field.is_relation and field.choices and not hasattr(
- self, field.name):
+ if (
+ not field.is_relation
+ and field.choices
+ and not hasattr(self, field.name)
+ ):
try:
data[field.name] = field.choices[2][0]
except IndexError:
@@ -129,13 +133,13 @@ def create(self, reftag, **kwargs):
# URLs
elif field.name.find("url") > -1:
data[field.name] = "{}/{}".format(
- self.website(data, reftag=reftag), field.name)
+ self.website(data, reftag=reftag), field.name
+ )
# everything else is routed to the apropriate method
# with the same name as the field name
else:
- data[field.name] = getattr(self, field.name)(data,
- reftag=reftag)
+ data[field.name] = getattr(self, field.name)(data, reftag=reftag)
return model.objects.create(**data)
def status(self, data, reftag=None):
diff --git a/peeringdb_server/models.py b/peeringdb_server/models.py
index acc31e09..3bdb957e 100644
--- a/peeringdb_server/models.py
+++ b/peeringdb_server/models.py
@@ -29,7 +29,7 @@
UpdatedDateTimeField,
)
import django_peeringdb.models as pdb_models
-from django_inet.models import (ASNField)
+from django_inet.models import ASNField
from allauth.account.models import EmailAddress, EmailConfirmation
from allauth.socialaccount.models import SocialAccount
@@ -37,19 +37,27 @@
from peeringdb_server.inet import RdapLookup, RdapNotFoundError
from peeringdb_server.validators import (
- validate_address_space, validate_info_prefixes4, validate_info_prefixes6,
- validate_prefix_overlap)
+ validate_address_space,
+ validate_info_prefixes4,
+ validate_info_prefixes6,
+ validate_prefix_overlap,
+)
-SPONSORSHIP_LEVELS = ((1, _("Silver")), (2, _("Gold")), (3, _("Platinum")),
- (4, _("Diamond")))
+SPONSORSHIP_LEVELS = (
+ (1, _("Silver")),
+ (2, _("Gold")),
+ (3, _("Platinum")),
+ (4, _("Diamond")),
+)
PARTNERSHIP_LEVELS = ((1, _("Data Validation")), (2, _("RIR")))
-COMMANDLINE_TOOLS = (("pdb_renumber_lans",
- _("Renumber IP Space")), ("pdb_fac_merge",
- _("Merge Facilities")),
- ("pdb_fac_merge_undo", _("Merge Facilities: UNDO")),
- ("pdb_undelete", _("Restore Object(s)")))
+COMMANDLINE_TOOLS = (
+ ("pdb_renumber_lans", _("Renumber IP Space")),
+ ("pdb_fac_merge", _("Merge Facilities")),
+ ("pdb_fac_merge_undo", _("Merge Facilities: UNDO")),
+ ("pdb_undelete", _("Restore Object(s)")),
+)
if settings.TUTORIAL_MODE:
@@ -57,6 +65,7 @@
COMMANDLINE_TOOLS += (("pdb_ixf_ixp_member_import", _("IX-F Import")),)
+
def debug_mail(*args):
for arg in list(args):
print(arg)
@@ -161,6 +170,7 @@ class URLField(pdb_models.URLField):
"""
local defaults for URLField
"""
+
pass
@@ -169,14 +179,19 @@ class GeocodeBaseMixin(models.Model):
Mixin to use for geocode enabled entities
Allows an entity to be geocoded with the pdb_geo_sync command
"""
- geocode_status = models.BooleanField(default=False, help_text=_(
- "Has this object's latitude and longitude been syncronized to it's address fields"
- ))
+
+ geocode_status = models.BooleanField(
+ default=False,
+ help_text=_(
+ "Has this object's latitude and longitude been syncronized to it's address fields"
+ ),
+ )
geocode_date = models.DateTimeField(
- blank=True, null=True, help_text=_("Last time of attempted geocode"))
+ blank=True, null=True, help_text=_("Last time of attempted geocode")
+ )
geocode_error = models.TextField(
- blank=True, null=True,
- help_text=_("Error message of previous geocode attempt"))
+ blank=True, null=True, help_text=_("Error message of previous geocode attempt")
+ )
class Meta(object):
abstract = True
@@ -195,9 +210,10 @@ def geocode_address(self):
"""
Returns an address string suitable for googlemaps query
"""
- #pylint: disable=missing-format-attribute
+ # pylint: disable=missing-format-attribute
return u"{e.address1} {e.address2}, {e.city}, {e.state} {e.zipcode}".format(
- e=self)
+ e=self
+ )
def geocode(self, gmaps, save=True):
"""
@@ -209,13 +225,15 @@ def geocode(self, gmaps, save=True):
- gmaps: googlemaps instance
"""
try:
- result = gmaps.geocode(self.geocode_address, components={
- "country": self.country.code
- })
- if result and ("street_address" in result[0]["types"]
- or "establishment" in result[0]["types"]
- or "premise" in result[0]["types"]
- or "subpremise" in result[0]["types"]):
+ result = gmaps.geocode(
+ self.geocode_address, components={"country": self.country.code}
+ )
+ if result and (
+ "street_address" in result[0]["types"]
+ or "establishment" in result[0]["types"]
+ or "premise" in result[0]["types"]
+ or "subpremise" in result[0]["types"]
+ ):
loc = result[0].get("geometry").get("location")
self.latitude = loc.get("lat")
self.longitude = loc.get("lng")
@@ -226,8 +244,7 @@ def geocode(self, gmaps, save=True):
self.geocode_error = _("Address not found")
self.geocode_status = True
return result
- except (googlemaps.exceptions.HTTPError,
- googlemaps.exceptions.ApiError), inst:
+ except (googlemaps.exceptions.HTTPError, googlemaps.exceptions.ApiError), inst:
self.geocode_error = str(inst)
self.geocode_status = True
except googlemaps.exceptions.Timeout, inst:
@@ -256,28 +273,41 @@ class UserOrgAffiliationRequest(models.Model):
"""
org = models.ForeignKey(
- "peeringdb_server.Organization", null=True, blank=True, help_text=
- _("This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found."
- ), related_name="affiliation_requests")
+ "peeringdb_server.Organization",
+ null=True,
+ blank=True,
+ help_text=_(
+ "This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found."
+ ),
+ related_name="affiliation_requests",
+ )
org_name = models.CharField(
- max_length=255, null=True, blank=True,
- help_text=_("The organization name entered by the user"))
+ max_length=255,
+ null=True,
+ blank=True,
+ help_text=_("The organization name entered by the user"),
+ )
- asn = ASNField(
- help_text=_("The ASN entered by the user"), null=True, blank=True)
+ asn = ASNField(help_text=_("The ASN entered by the user"), null=True, blank=True)
- user = models.ForeignKey("peeringdb_server.User",
- help_text=_("The user that made the request"),
- related_name="affiliation_requests")
+ user = models.ForeignKey(
+ "peeringdb_server.User",
+ help_text=_("The user that made the request"),
+ related_name="affiliation_requests",
+ )
created = CreatedDateTimeField()
status = models.CharField(
- max_length=254, choices=[('pending', _('Pending')), ('approved',
- _('Approved')),
- ('denied', _('Denied'))],
- help_text=_("Status of this request"))
+ max_length=254,
+ choices=[
+ ("pending", _("Pending")),
+ ("approved", _("Approved")),
+ ("denied", _("Denied")),
+ ],
+ help_text=_("Status of this request"),
+ )
class Meta(object):
db_table = "peeringdb_user_org_affil_request"
@@ -302,8 +332,10 @@ def approve(self):
"""
if self.org_id:
- if self.org.admin_usergroup.user_set.count() > 0 or \
- self.org.usergroup.user_set.count() > 0:
+ if (
+ self.org.admin_usergroup.user_set.count() > 0
+ or self.org.usergroup.user_set.count() > 0
+ ):
# if there are other users in this org, add user as normal
# member
self.org.usergroup.user_set.add(self.user)
@@ -336,18 +368,22 @@ def notify_ownership_approved(self):
"""
if not self.org:
return
- #FIXME: why not have the `override` call in email_user in the first place?
+ # FIXME: why not have the `override` call in email_user in the first place?
with override(self.user.locale):
self.user.email_user(
- _(u'Your affiliation to Organization "{}" has been approved')
- .format(self.org.name),
+ _(u'Your affiliation to Organization "{}" has been approved').format(
+ self.org.name
+ ),
loader.get_template(
- 'email/notify-user-uoar-ownership-approved.txt').render({
+ "email/notify-user-uoar-ownership-approved.txt"
+ ).render(
+ {
"uoar": self,
- "org_url": "{}/org/{}".format(settings.BASE_URL,
- self.org.id),
- "support_email": settings.DEFAULT_FROM_EMAIL
- }))
+ "org_url": "{}/org/{}".format(settings.BASE_URL, self.org.id),
+ "support_email": settings.DEFAULT_FROM_EMAIL,
+ }
+ ),
+ )
class VerificationQueueItem(models.Model):
@@ -362,12 +398,14 @@ class VerificationQueueItem(models.Model):
# reference to the item that requires review stored in generic fk
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
- item = GenericForeignKey('content_type', 'object_id')
+ item = GenericForeignKey("content_type", "object_id")
user = models.ForeignKey(
- "peeringdb_server.User", related_name="vqitems", null=True, blank=True,
- help_text=_(
- "The item that this queue is attached to was created by this user"
- ))
+ "peeringdb_server.User",
+ related_name="vqitems",
+ null=True,
+ blank=True,
+ help_text=_("The item that this queue is attached to was created by this user"),
+ )
created = CreatedDateTimeField()
notified = models.BooleanField(default=False)
@@ -385,7 +423,8 @@ def get_for_entity(cls, entity):
return cls.objects.get(
content_type=ContentType.objects.get_for_model(type(entity)),
- object_id=entity.id)
+ object_id=entity.id,
+ )
@property
def item_admin_url(self):
@@ -393,9 +432,10 @@ def item_admin_url(self):
Return admin url for the object in the verification queue
"""
return urlresolvers.reverse(
- "admin:%s_%s_change" % (self.content_type.app_label,
- self.content_type.model),
- args=(self.object_id, ))
+ "admin:%s_%s_change"
+ % (self.content_type.app_label, self.content_type.model),
+ args=(self.object_id,),
+ )
@property
def approve_admin_url(self):
@@ -403,9 +443,9 @@ def approve_admin_url(self):
Return admin url for approval of the verification queue item
"""
return urlresolvers.reverse(
- "admin:%s_%s_actions" % (self._meta.app_label,
- self._meta.model_name),
- args=(self.id, "vq_approve"))
+ "admin:%s_%s_actions" % (self._meta.app_label, self._meta.model_name),
+ args=(self.id, "vq_approve"),
+ )
@property
def deny_admin_url(self):
@@ -413,9 +453,9 @@ def deny_admin_url(self):
Return admin url for denial of the verification queue item
"""
return urlresolvers.reverse(
- "admin:%s_%s_actions" % (self._meta.app_label,
- self._meta.model_name), args=(self.id,
- "vq_deny"))
+ "admin:%s_%s_actions" % (self._meta.app_label, self._meta.model_name),
+ args=(self.id, "vq_deny"),
+ )
@reversion.create_revision()
def approve(self):
@@ -460,9 +500,13 @@ class Organization(pdb_models.OrganizationBase):
# FileField for now as the server doesn't have all the
# dependencies installedd (libjpeg / Pillow)
logo = models.FileField(
- upload_to="logos/", null=True, blank=True, help_text=
- _("Allows you to upload and set a logo image file for this organization"
- ))
+ upload_to="logos/",
+ null=True,
+ blank=True,
+ help_text=_(
+ "Allows you to upload and set a logo image file for this organization"
+ ),
+ )
@staticmethod
def autocomplete_search_fields():
@@ -480,7 +524,8 @@ def admin_url(self):
Return the admin url for this organization (in /cp)
"""
return urlresolvers.reverse(
- "admin:peeringdb_server_organization_change", args=(self.id, ))
+ "admin:peeringdb_server_organization_change", args=(self.id,)
+ )
@property
def owned(self):
@@ -621,15 +666,12 @@ def nsp_ruleset(self):
data
"""
return {
-
# since poc are stored in a list we need to specify a list
# handler for it, its a class function on NetworkContact that
# returns a relative permission namespace for each poc in the
# list
"list-handlers": {
- "poc_set": {
- "namespace": NetworkContact.nsp_namespace_in_list
- }
+ "poc_set": {"namespace": NetworkContact.nsp_namespace_in_list}
}
}
@@ -640,9 +682,11 @@ def sponsorship(self):
has no sponsorship ongoing return None
"""
now = datetime.datetime.now().replace(tzinfo=UTC())
- return self.sponsorship_set.filter(
- start_date__lte=now,
- end_date__gte=now).order_by("-start_date").first()
+ return (
+ self.sponsorship_set.filter(start_date__lte=now, end_date__gte=now)
+ .order_by("-start_date")
+ .first()
+ )
@classmethod
@reversion.create_revision()
@@ -682,13 +726,18 @@ class Sponsorship(models.Model):
for a designated timespan
"""
- orgs = models.ManyToManyField(Organization, through="peeringdb_server.SponsorshipOrganization", related_name="sponsorship_set")
+ orgs = models.ManyToManyField(
+ Organization,
+ through="peeringdb_server.SponsorshipOrganization",
+ related_name="sponsorship_set",
+ )
start_date = models.DateTimeField(
- _("Sponsorship starts on"), default=default_time_s)
- end_date = models.DateTimeField(
- _("Sponsorship ends on"), default=default_time_e)
+ _("Sponsorship starts on"), default=default_time_s
+ )
+ end_date = models.DateTimeField(_("Sponsorship ends on"), default=default_time_e)
notify_date = models.DateTimeField(
- _("Expiration notification sent on"), null=True, blank=True)
+ _("Expiration notification sent on"), null=True, blank=True
+ )
level = models.PositiveIntegerField(choices=SPONSORSHIP_LEVELS, default=1)
class Meta:
@@ -709,7 +758,6 @@ def active_by_org(cls):
for org in sponsorship.orgs.all():
yield org, sponsorship
-
@property
def label(self):
"""
@@ -727,16 +775,17 @@ def notify_expiration(self):
if self.notify_date is not None and self.notify_date >= self.end_date:
return False
msg = loader.get_template(
- 'email/notify-sponsorship-admin-expiration.txt').render({
- "instance": self
- })
+ "email/notify-sponsorship-admin-expiration.txt"
+ ).render({"instance": self})
org_names = ", ".join([org.name for org in self.orgs.all()])
- mail = EmailMultiAlternatives((u'{}: {}').format(
- _("Sponsorship Expired"), org_names), msg,
- settings.DEFAULT_FROM_EMAIL,
- [settings.SPONSORSHIPS_EMAIL])
+ mail = EmailMultiAlternatives(
+ (u"{}: {}").format(_("Sponsorship Expired"), org_names),
+ msg,
+ settings.DEFAULT_FROM_EMAIL,
+ [settings.SPONSORSHIPS_EMAIL],
+ )
mail.attach_alternative(msg.replace("\n", "<br />\n"), "text/html")
mail.send(fail_silently=True)
@@ -750,17 +799,26 @@ class SponsorshipOrganization(models.Model):
"""
Describes an organization->sponsorship relationship
"""
+
org = models.ForeignKey(Organization, related_name="sponsorshiporg_set")
sponsorship = models.ForeignKey(Sponsorship, related_name="sponsorshiporg_set")
url = models.URLField(
- _("URL"), help_text=
- _("If specified clicking the sponsorship will take the user to this location"
- ), blank=True, null=True)
+ _("URL"),
+ help_text=_(
+ "If specified clicking the sponsorship will take the user to this location"
+ ),
+ blank=True,
+ null=True,
+ )
logo = models.FileField(
- upload_to="logos/", null=True, blank=True, help_text=
- _("Allows you to upload and set a logo image file for this sponsorship"
- ))
+ upload_to="logos/",
+ null=True,
+ blank=True,
+ help_text=_(
+ "Allows you to upload and set a logo image file for this sponsorship"
+ ),
+ )
class Partnership(models.Model):
@@ -773,14 +831,22 @@ class Partnership(models.Model):
org = models.ForeignKey(Organization, related_name="partnerships")
level = models.PositiveIntegerField(choices=PARTNERSHIP_LEVELS, default=1)
url = models.URLField(
- _("URL"), help_text=
- _("If specified clicking the partnership will take the user to this location"
- ), blank=True, null=True)
+ _("URL"),
+ help_text=_(
+ "If specified clicking the partnership will take the user to this location"
+ ),
+ blank=True,
+ null=True,
+ )
logo = models.FileField(
- upload_to="logos/", null=True, blank=True, help_text=
- _("Allows you to upload and set a logo image file for this partnership"
- ))
+ upload_to="logos/",
+ null=True,
+ blank=True,
+ help_text=_(
+ "Allows you to upload and set a logo image file for this partnership"
+ ),
+ )
class Meta:
db_table = "peeringdb_partnership"
@@ -814,8 +880,9 @@ def log_entity(self, entity, note=""):
entity can be any handleref instance or a User instance
"""
- return OrganizationMergeEntity.objects.create(merge=self,
- entity=entity, note=note)
+ return OrganizationMergeEntity.objects.create(
+ merge=self, entity=entity, note=note
+ )
def undo(self):
"""
@@ -853,7 +920,7 @@ class OrganizationMergeEntity(models.Model):
merge = models.ForeignKey(OrganizationMerge, related_name="entities")
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
- entity = GenericForeignKey('content_type', 'object_id')
+ entity = GenericForeignKey("content_type", "object_id")
note = models.CharField(max_length=32, blank=True, null=True)
class Meta:
@@ -900,12 +967,10 @@ def nsp_namespace_in_list(cls):
@classmethod
def nsp_namespace_from_id(cls, org_id, fac_id):
- return "%s.facility.%s" % (Organization.nsp_namespace_from_id(org_id),
- fac_id)
+ return "%s.facility.%s" % (Organization.nsp_namespace_from_id(org_id), fac_id)
@classmethod
- def related_to_net(cls, value=None, filt=None, field="network_id",
- qset=None):
+ def related_to_net(cls, value=None, filt=None, field="network_id", qset=None):
"""
Returns queryset of Facility objects that
are related to the network specified via net_id
@@ -970,7 +1035,8 @@ def overlapping_asns(cls, asns, qset=None):
# of the specified asns
for asn in asns:
for netfac in NetworkFacility.objects.filter(
- network__asn=asn, status="ok").select_related("network"):
+ network__asn=asn, status="ok"
+ ).select_related("network"):
if netfac.facility_id not in facilities:
facilities[netfac.facility_id] = {}
facilities[netfac.facility_id][asn] = True
@@ -1059,8 +1125,7 @@ def __unicode__(self):
return self.name
@classmethod
- def related_to_ixlan(cls, value=None, filt=None, field="ixlan_id",
- qset=None):
+ def related_to_ixlan(cls, value=None, filt=None, field="ixlan_id", qset=None):
"""
Returns queryset of InternetExchange objects that
are related to IXLan specified by ixlan_id
@@ -1077,8 +1142,7 @@ def related_to_ixlan(cls, value=None, filt=None, field="ixlan_id",
return qset.filter(id__in=[ix.ix_id for ix in q])
@classmethod
- def related_to_ixfac(cls, value=None, filt=None, field="ixfac_id",
- qset=None):
+ def related_to_ixfac(cls, value=None, filt=None, field="ixfac_id", qset=None):
"""
Returns queryset of InternetExchange objects that
are related to IXfac link specified by ixfac_id
@@ -1095,8 +1159,7 @@ def related_to_ixfac(cls, value=None, filt=None, field="ixfac_id",
return qset.filter(id__in=[ix.ix_id for ix in q])
@classmethod
- def related_to_fac(cls, filt=None, value=None, field="facility_id",
- qset=None):
+ def related_to_fac(cls, filt=None, value=None, field="facility_id", qset=None):
"""
Returns queryset of InternetExchange objects that
are related to the facility specified by fac_id
@@ -1112,8 +1175,7 @@ def related_to_fac(cls, filt=None, value=None, field="facility_id",
return qset.filter(id__in=[ix.ix_id for ix in q])
@classmethod
- def related_to_net(cls, filt=None, value=None, field="network_id",
- qset=None):
+ def related_to_net(cls, filt=None, value=None, field="network_id", qset=None):
"""
Returns queryset of InternetExchange objects that
are related to the network specified by network_id
@@ -1141,7 +1203,8 @@ def related_to_ipblock(cls, ipblock, qset=None):
qset = cls.handleref.undeleted()
q = IXLanPrefix.objects.select_related("ixlan").filter(
- prefix__startswith=ipblock)
+ prefix__startswith=ipblock
+ )
return qset.filter(id__in=[pfx.ixlan.ix_id for pfx in q])
@@ -1177,8 +1240,8 @@ def overlapping_asns(cls, asns, qset=None):
# of the specified asns
for asn in asns:
for netixlan in NetworkIXLan.objects.filter(
- network__asn=asn, status="ok").select_related(
- "network", "ixlan"):
+ network__asn=asn, status="ok"
+ ).select_related("network", "ixlan"):
if netixlan.ixlan.ix_id not in exchanges:
exchanges[netixlan.ixlan.ix_id] = {}
exchanges[netixlan.ixlan.ix_id][asn] = True
@@ -1228,7 +1291,6 @@ def filter_net_count(cls, filt=None, value=None, qset=None):
return qset.filter(pk__in=exchanges)
-
@classmethod
def nsp_namespace_in_list(cls):
return str(cls.id)
@@ -1239,7 +1301,9 @@ def nsp_namespace_from_id(cls, org_id, ix_id):
Returns permissioning namespace for an exchange
"""
return "%s.internetexchange.%s" % (
- Organization.nsp_namespace_from_id(org_id), ix_id)
+ Organization.nsp_namespace_from_id(org_id),
+ ix_id,
+ )
@property
def networks(self):
@@ -1282,16 +1346,18 @@ def ixlan_set_active_or_pending(self):
Returns queryset of active or pending ixlan objects at
this exchange
"""
- return self.ixlan_set(manager="handleref").filter(
- status__in=["ok", "pending"])
+ return self.ixlan_set(manager="handleref").filter(status__in=["ok", "pending"])
@property
def ixfac_set_active(self):
"""
Returns queryset of active ixfac objects at this exchange
"""
- return self.ixfac_set(manager="handleref").select_related(
- "facility").filter(status="ok")
+ return (
+ self.ixfac_set(manager="handleref")
+ .select_related("facility")
+ .filter(status="ok")
+ )
@property
def nsp_namespace(self):
@@ -1341,33 +1407,28 @@ def descriptive_name(self):
"""
Returns a descriptive label of the ixfac for logging purposes
"""
- return "ixfac{} {} <-> {}".format(
- self.id, self.ix.name, self.facility.name)
-
-
+ return "ixfac{} {} <-> {}".format(self.id, self.ix.name, self.facility.name)
@classmethod
def nsp_namespace_from_id(cls, org_id, ix_id, id):
"""
Returns permissioning namespace for an ixfac
"""
- return "%s.fac.%s" % (
- InternetExchange.nsp_namespace_from_id(org_id, ix_id), id)
+ return "%s.fac.%s" % (InternetExchange.nsp_namespace_from_id(org_id, ix_id), id)
@property
def nsp_namespace(self):
"""
Returns permissioning namespace for this ixfac
"""
- return self.__class__.nsp_namespace_from_id(self.ix.org_id, self.ix.id,
- self.id)
+ return self.__class__.nsp_namespace_from_id(self.ix.org_id, self.ix.id, self.id)
def nsp_has_perms_PUT(self, user, request):
return validate_PUT_ownership(user, self, request.data, ["ix"])
class Meta:
- unique_together = ('ix', 'facility')
- db_table = u'peeringdb_ix_facility'
+ unique_together = ("ix", "facility")
+ db_table = u"peeringdb_ix_facility"
@reversion.register
@@ -1376,8 +1437,7 @@ class IXLan(pdb_models.IXLanBase):
Describes a LAN at an exchange
"""
- ix = models.ForeignKey(InternetExchange, default=0,
- related_name="ixlan_set")
+ ix = models.ForeignKey(InternetExchange, default=0, related_name="ixlan_set")
ixf_ixp_member_list_url = models.URLField(null=True, blank=True)
ixf_ixp_import_enabled = models.BooleanField(default=False)
@@ -1388,16 +1448,14 @@ class HandleRef:
delete_cascade = ["ixpfx_set", "netixlan_set"]
class Meta:
- db_table = u'peeringdb_ixlan'
+ db_table = u"peeringdb_ixlan"
@property
def descriptive_name(self):
"""
Returns a descriptive label of the ixlan for logging purposes
"""
- return "ixlan{} {} {}".format(
- self.id, self.name, self.ix.name)
-
+ return "ixlan{} {} {}".format(self.id, self.name, self.ix.name)
@classmethod
def nsp_namespace_from_id(cls, org_id, ix_id, id):
@@ -1405,15 +1463,16 @@ def nsp_namespace_from_id(cls, org_id, ix_id, id):
Returns permissioning namespace for an ixlan
"""
return "%s.ixlan.%s" % (
- InternetExchange.nsp_namespace_from_id(org_id, ix_id), id)
+ InternetExchange.nsp_namespace_from_id(org_id, ix_id),
+ id,
+ )
@property
def nsp_namespace(self):
"""
Returns permissioning namespace for this ixlan
"""
- return self.__class__.nsp_namespace_from_id(self.ix.org_id, self.ix_id,
- self.id)
+ return self.__class__.nsp_namespace_from_id(self.ix.org_id, self.ix_id, self.id)
@property
def ixpfx_set_active(self):
@@ -1427,16 +1486,18 @@ def ixpfx_set_active_or_pending(self):
"""
Returns queryset of active or pending prefixes at this ixlan
"""
- return self.ixpfx_set(manager="handleref").filter(
- status__in=["ok", "pending"])
+ return self.ixpfx_set(manager="handleref").filter(status__in=["ok", "pending"])
@property
def netixlan_set_active(self):
"""
Returns queryset of active netixlan objects at this ixlan
"""
- return self.netixlan_set(
- manager="handleref").select_related("network").filter(status="ok")
+ return (
+ self.netixlan_set(manager="handleref")
+ .select_related("network")
+ .filter(status="ok")
+ )
# q = NetworkIXLan.handleref.filter(ixlan_id=self.id).filter(status="ok")
# return Network.handleref.filter(id__in=[i.network_id for i in
# q]).filter(status="ok")
@@ -1453,7 +1514,6 @@ def test_ipv4_address(self, ipv4):
return True
return False
-
def test_ipv6_address(self, ipv6):
"""
test that the ipv6 address exists in one of the prefixes in this ixlan
@@ -1463,7 +1523,6 @@ def test_ipv6_address(self, ipv6):
return True
return False
-
@reversion.create_revision()
def add_netixlan(self, netixlan_info, save=True, save_others=True):
"""
@@ -1496,8 +1555,12 @@ def add_netixlan(self, netixlan_info, save=True, save_others=True):
ipv6_valid = False
def result(netixlan=None):
- return { "netixlan": netixlan, "created": created,
- "changed": changed, "log":log}
+ return {
+ "netixlan": netixlan,
+ "created": created,
+ "changed": changed,
+ "log": log,
+ }
# check if either of the provided ip addresses are a fit for ANY of
# the prefixes in this ixlan
@@ -1511,24 +1574,31 @@ def result(netixlan=None):
# and bail
if (ipv4 and not ipv4_valid) or (ipv6 and not ipv6_valid):
log.append(
- "Ip addresses ({}, {}) do not match any prefix " \
- "on this ixlan".format(ipv4, ipv6))
+ "Ip addresses ({}, {}) do not match any prefix "
+ "on this ixlan".format(ipv4, ipv6)
+ )
return result()
# Next we check if an active netixlan with the ipaddress exists in ANOTHER lan, and bail
# if it does.
- if ipv4 and \
- NetworkIXLan.objects.filter(status="ok", ipaddr4=ipv4).exclude(ixlan=self).count() > 0:
- log.append(
- "Ip address {} already exists in another lan".
- format(ipv4))
+ if (
+ ipv4
+ and NetworkIXLan.objects.filter(status="ok", ipaddr4=ipv4)
+ .exclude(ixlan=self)
+ .count()
+ > 0
+ ):
+ log.append("Ip address {} already exists in another lan".format(ipv4))
return result()
- if ipv6 and \
- NetworkIXLan.objects.filter(status="ok", ipaddr6=ipv6).exclude(ixlan=self).count() > 0:
- log.append(
- "Ip address {} already exists in another lan".
- format(ipv6))
+ if (
+ ipv6
+ and NetworkIXLan.objects.filter(status="ok", ipaddr6=ipv6)
+ .exclude(ixlan=self)
+ .count()
+ > 0
+ ):
+ log.append("Ip address {} already exists in another lan".format(ipv6))
return result()
# now we need to figure out if the ipaddresses already exist in this ixlan,
@@ -1537,7 +1607,8 @@ def result(netixlan=None):
try:
if ipv4:
netixlan_existing_v4 = NetworkIXLan.objects.get(
- status="ok", ixlan=self, ipaddr4=ipv4)
+ status="ok", ixlan=self, ipaddr4=ipv4
+ )
else:
netixlan_existing_v4 = None
except NetworkIXLan.DoesNotExist:
@@ -1546,7 +1617,8 @@ def result(netixlan=None):
try:
if ipv6:
netixlan_existing_v6 = NetworkIXLan.objects.get(
- status="ok", ixlan=self, ipaddr6=ipv6)
+ status="ok", ixlan=self, ipaddr6=ipv6
+ )
else:
netixlan_existing_v6 = None
except NetworkIXLan.DoesNotExist:
@@ -1570,8 +1642,9 @@ def result(netixlan=None):
netixlan = netixlan_existing_v6
else:
# neither address exists, create a new netixlan object
- netixlan = NetworkIXLan(ixlan=self, network=netixlan_info.network,
- status="ok")
+ netixlan = NetworkIXLan(
+ ixlan=self, network=netixlan_info.network, status="ok"
+ )
created = True
reason = "New ip-address"
@@ -1581,17 +1654,18 @@ def result(netixlan=None):
if ipv4 != netixlan.ipaddr4:
# we need to check if this ipaddress exists on a soft-deleted netixlan elsewhere, and
# reset if so.
- for other in \
- NetworkIXLan.objects.filter(ipaddr4=ipv4, status="deleted").exclude(asn=asn):
+ for other in NetworkIXLan.objects.filter(
+ ipaddr4=ipv4, status="deleted"
+ ).exclude(asn=asn):
# FIXME: this is not practical until
# https://github.com/peeringdb/peeringdb/issues/90 is resolved
# so skipping for now
continue
- #other.ipaddr4 = None
- #other.notes = "Ip address {} was claimed by other netixlan".format(
+ # other.ipaddr4 = None
+ # other.notes = "Ip address {} was claimed by other netixlan".format(
# ipv4)
- #if save or save_others:
+ # if save or save_others:
# other.save()
netixlan.ipaddr4 = ipv4
@@ -1601,17 +1675,18 @@ def result(netixlan=None):
if ipv6 != netixlan.ipaddr6:
# we need to check if this ipaddress exists on a soft-deleted netixlan elsewhere, and
# reset if so.
- for other in \
- NetworkIXLan.objects.filter(ipaddr6=ipv6, status="deleted").exclude(asn=asn):
+ for other in NetworkIXLan.objects.filter(
+ ipaddr6=ipv6, status="deleted"
+ ).exclude(asn=asn):
# FIXME: this is not practical until
# https://github.com/peeringdb/peeringdb/issues/90 is resolved
# so skipping for now
continue
- #other.ipaddr6 = None
- #other.notes = "Ip address {} was claimed by other netixlan".format(
+ # other.ipaddr6 = None
+ # other.notes = "Ip address {} was claimed by other netixlan".format(
# ipv6)
- #if save or save_others:
+ # if save or save_others:
# other.save()
netixlan.ipaddr6 = ipv6
changed.append("ipaddr6")
@@ -1622,8 +1697,9 @@ def result(netixlan=None):
changed.append("is_rs_peer")
# Speed
- if netixlan_info.speed != netixlan.speed and \
- (netixlan_info.speed > 0 or netixlan.speed is None):
+ if netixlan_info.speed != netixlan.speed and (
+ netixlan_info.speed > 0 or netixlan.speed is None
+ ):
netixlan.speed = netixlan_info.speed
changed.append("speed")
@@ -1641,9 +1717,11 @@ def result(netixlan=None):
try:
netixlan.full_clean()
except Exception as inst:
- log.append("Validation Failure AS{} {} {}: {}".format(
- netixlan.network.asn, netixlan.ipaddr4, netixlan.ipaddr6,
- inst))
+ log.append(
+ "Validation Failure AS{} {} {}: {}".format(
+ netixlan.network.asn, netixlan.ipaddr4, netixlan.ipaddr6, inst
+ )
+ )
return result(None)
if save and changed:
@@ -1658,9 +1736,13 @@ class IXLanIXFMemberImportAttempt(models.Model):
Holds information about the most recent ixf member import
attempt for an ixlan
"""
- ixlan = models.OneToOneField(IXLan, on_delete=models.CASCADE,
- primary_key=True,
- related_name="ixf_import_attempt")
+
+ ixlan = models.OneToOneField(
+ IXLan,
+ on_delete=models.CASCADE,
+ primary_key=True,
+ related_name="ixf_import_attempt",
+ )
updated = models.DateTimeField(auto_now=True)
info = models.TextField(null=True, blank=True)
@@ -1670,6 +1752,7 @@ class IXLanIXFMemberImportLog(models.Model):
Import log of a IXF member import that changed or added at least one
netixlan under the specified ixlans
"""
+
ixlan = models.ForeignKey(IXLan, related_name="ixf_import_log_set")
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
@@ -1698,18 +1781,20 @@ class IXLanIXFMemberImportLogEntry(models.Model):
the netixlan's version after the change, which can be used to rollback
the change
"""
+
log = models.ForeignKey(IXLanIXFMemberImportLog, related_name="entries")
- netixlan = models.ForeignKey("peeringdb_server.NetworkIXLan",
- related_name="ixf_import_log_entries")
- version_before = models.ForeignKey(reversion.models.Version, null=True,
- related_name="ixf_import_log_before")
- version_after = models.ForeignKey(reversion.models.Version,
- related_name="ixf_import_log_after")
+ netixlan = models.ForeignKey(
+ "peeringdb_server.NetworkIXLan", related_name="ixf_import_log_entries"
+ )
+ version_before = models.ForeignKey(
+ reversion.models.Version, null=True, related_name="ixf_import_log_before"
+ )
+ version_after = models.ForeignKey(
+ reversion.models.Version, related_name="ixf_import_log_after"
+ )
action = models.CharField(max_length=255, null=True, blank=True)
reason = models.CharField(max_length=255, null=True, blank=True)
-
-
class Meta(object):
verbose_name = _("IXF Import Log Entry")
verbose_name_plural = _("IXF Import Log Entries")
@@ -1725,7 +1810,7 @@ def changes(self):
if not self.version_before:
return {}
data_before = self.version_before.field_dict
- data_after= self.version_after.field_dict
+ data_after = self.version_after.field_dict
rv = {}
for k, v in data_after.items():
if k in ["created", "updated", "version"]:
@@ -1733,18 +1818,18 @@ def changes(self):
v2 = data_before.get(k)
if v != v2:
if isinstance(v, ipaddress.IPv4Address) or isinstance(
- v, ipaddress.IPv6Address):
+ v, ipaddress.IPv6Address
+ ):
rv[k] = str(v)
else:
rv[k] = v
return rv
-
-
def rollback_status(self):
recent_version = reversion.models.Version.objects.get_for_object(
- self.netixlan).first()
+ self.netixlan
+ ).first()
if self.version_after == recent_version:
if self.netixlan.status == "deleted":
conflict_v4, conflict_v6 = self.netixlan.ipaddress_conflict()
@@ -1756,7 +1841,6 @@ def rollback_status(self):
return 1
-
# read only, or can make bigger, making smaller could break links
# validate could check
@@ -1774,9 +1858,7 @@ def descriptive_name(self):
"""
Returns a descriptive label of the ixpfx for logging purposes
"""
- return "ixpfx{} {}".format(
- self.id, self.prefix)
-
+ return "ixpfx{} {}".format(self.id, self.prefix)
@classmethod
def nsp_namespace_from_id(cls, org_id, ix_id, ixlan_id, id):
@@ -1784,7 +1866,9 @@ def nsp_namespace_from_id(cls, org_id, ix_id, ixlan_id, id):
Returns permissioning namespace for an ixpfx
"""
return "%s.prefix.%s" % (
- IXLan.nsp_namespace_from_id(org_id, ix_id, ixlan_id), id)
+ IXLan.nsp_namespace_from_id(org_id, ix_id, ixlan_id),
+ id,
+ )
@classmethod
def related_to_ix(cls, value=None, filt=None, field="ix_id", qset=None):
@@ -1805,7 +1889,8 @@ def nsp_namespace(self):
Returns permissioning namespace for this ixpfx
"""
return self.nsp_namespace_from_id(
- self.ixlan.ix.org_id, self.ixlan.ix.id, self.ixlan.id, self.id)
+ self.ixlan.ix.org_id, self.ixlan.ix.id, self.ixlan.id, self.id
+ )
def nsp_has_perms_PUT(self, user, request):
return validate_PUT_ownership(user, self, request.data, ["ixlan"])
@@ -1830,14 +1915,13 @@ def test_ip_address(self, addr):
return False
if isinstance(addr, unicode):
addr = ipaddress.ip_address(addr)
- return (addr in ipaddress.ip_network(self.prefix))
+ return addr in ipaddress.ip_network(self.prefix)
except ipaddress.AddressValueError:
return False
except ValueError as inst:
return False
-
def clean(self):
"""
Custom model validation
@@ -1856,9 +1940,12 @@ class Network(pdb_models.NetworkBase):
org = models.ForeignKey(Organization, related_name="net_set")
allow_ixp_update = models.BooleanField(
- null=False, default=False, help_text=
- _("Sepcifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data"
- ))
+ null=False,
+ default=False,
+ help_text=_(
+ "Specifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data"
+ ),
+ )
@staticmethod
def autocomplete_search_fields():
@@ -1880,20 +1967,17 @@ def create_from_rdap(cls, rdap, asn, org):
if not rdap.name:
name = "AS%d" % (asn)
if cls.objects.filter(name=name).exists():
- net = cls.objects.create(org=org, asn=asn, name="%s !" % name,
- status="ok")
+ net = cls.objects.create(org=org, asn=asn, name="%s !" % name, status="ok")
else:
net = cls.objects.create(org=org, asn=asn, name=name, status="ok")
return net, True
@classmethod
def nsp_namespace_from_id(cls, org_id, net_id):
- return "%s.network.%s" % (Organization.nsp_namespace_from_id(org_id),
- net_id)
+ return "%s.network.%s" % (Organization.nsp_namespace_from_id(org_id), net_id)
@classmethod
- def related_to_fac(cls, value=None, filt=None, field="facility_id",
- qset=None):
+ def related_to_fac(cls, value=None, filt=None, field="facility_id", qset=None):
"""
Filter queryset of Network objects related to the facility
specified by fac_id
@@ -1909,8 +1993,7 @@ def related_to_fac(cls, value=None, filt=None, field="facility_id",
return qset.filter(id__in=[i.network_id for i in q])
@classmethod
- def not_related_to_fac(cls, value=None, filt=None, field="facility_id",
- qset=None):
+ def not_related_to_fac(cls, value=None, filt=None, field="facility_id", qset=None):
"""
Filter queryset of Network objects NOT related to the facility
specified by fac_id (as in networks NOT present at the facility)
@@ -1957,8 +2040,7 @@ def related_to_netixlan(cls, value=None, filt=None, field="id", qset=None):
return qset.filter(id__in=[i.network_id for i in q])
@classmethod
- def related_to_ixlan(cls, value=None, filt=None, field="ixlan_id",
- qset=None):
+ def related_to_ixlan(cls, value=None, filt=None, field="ixlan_id", qset=None):
"""
Filter queryset of Network objects related to the ixlan
specified by ixlan_id
@@ -1990,8 +2072,7 @@ def related_to_ix(cls, value=None, filt=None, field="ix_id", qset=None):
return qset.filter(id__in=[i.network_id for i in q])
@classmethod
- def not_related_to_ix(cls, value=None, filt=None, field="ix_id",
- qset=None):
+ def not_related_to_ix(cls, value=None, filt=None, field="ix_id", qset=None):
"""
Filter queryset of Network objects not related to the ix
specified by ix_id (as in networks not present at the exchange)
@@ -2006,7 +2087,6 @@ def not_related_to_ix(cls, value=None, filt=None, field="ix_id",
q = NetworkIXLan.handleref.select_related("ixlan").filter(**filt)
return qset.exclude(id__in=[i.network_id for i in q])
-
@classmethod
def as_set_map(cls, qset=None):
"""
@@ -2055,7 +2135,6 @@ def ixlan_set_ixf_enabled(self):
qset = qset.exclude(ixf_ixp_member_list_url__isnull=True)
return qset
-
@property
def poc_set_active(self):
return self.poc_set(manager="handleref").filter(status="ok")
@@ -2077,22 +2156,15 @@ def nsp_ruleset(self):
"""
return {
-
# we require explicit perms to private network contacts
- "require": {
- "poc_set.users": 0x01,
- "poc_set.private": 0x01
- },
-
+ "require": {"poc_set.users": 0x01, "poc_set.private": 0x01},
# since poc are stored in a list we need to specify a list
# handler for it, its a class function on NetworkContact that
# returns a relative permission namespace for each poc in the
# list
"list-handlers": {
- "poc_set": {
- "namespace": NetworkContact.nsp_namespace_in_list
- }
- }
+ "poc_set": {"namespace": NetworkContact.nsp_namespace_in_list}
+ },
}
@property
@@ -2126,10 +2198,10 @@ class NetworkContact(pdb_models.ContactBase):
"""
# id = models.AutoField(primary_key=True)
- network = models.ForeignKey(Network, default=0, related_name='poc_set')
+ network = models.ForeignKey(Network, default=0, related_name="poc_set")
class Meta:
- db_table = u'peeringdb_network_contact'
+ db_table = u"peeringdb_network_contact"
@classmethod
def nsp_namespace_in_list(cls, **kwargs):
@@ -2149,8 +2221,7 @@ def nsp_namespace_from_id(cls, org_id, net_id, vis):
"""
Returns permissioning namespace for a network contact
"""
- return "%s.poc_set.%s" % (
- Network.nsp_namespace_from_id(org_id, net_id), vis)
+ return "%s.poc_set.%s" % (Network.nsp_namespace_from_id(org_id, net_id), vis)
@property
def nsp_namespace(self):
@@ -2158,7 +2229,8 @@ def nsp_namespace(self):
Returns a custom namespace for an instance of this model
"""
return self.__class__.nsp_namespace_from_id(
- self.network.org_id, self.network.id, self.visible)
+ self.network.org_id, self.network.id, self.visible
+ )
@property
def nsp_require_explicit_read(self):
@@ -2178,23 +2250,19 @@ class NetworkFacility(pdb_models.NetworkFacilityBase):
Describes a network <-> facility relationship
"""
- network = models.ForeignKey(Network, default=0, related_name='netfac_set')
- facility = models.ForeignKey(Facility, default=0,
- related_name='netfac_set')
+ network = models.ForeignKey(Network, default=0, related_name="netfac_set")
+ facility = models.ForeignKey(Facility, default=0, related_name="netfac_set")
class Meta:
- db_table = u'peeringdb_network_facility'
- unique_together = ('network', 'facility', 'local_asn')
-
-
+ db_table = u"peeringdb_network_facility"
+ unique_together = ("network", "facility", "local_asn")
@classmethod
def nsp_namespace_from_id(cls, org_id, net_id, fac_id):
"""
Returns permissioning namespace for a netfac
"""
- return "%s.fac.%s" % (Network.nsp_namespace_from_id(org_id, net_id),
- fac_id)
+ return "%s.fac.%s" % (Network.nsp_namespace_from_id(org_id, net_id), fac_id)
@property
def nsp_namespace(self):
@@ -2202,11 +2270,11 @@ def nsp_namespace(self):
Returns permissioning namespace for this netfac
"""
return self.__class__.nsp_namespace_from_id(
- self.network.org_id, self.network_id, self.facility_id)
+ self.network.org_id, self.network_id, self.facility_id
+ )
@classmethod
- def related_to_name(cls, value=None, filt=None, field="facility__name",
- qset=None):
+ def related_to_name(cls, value=None, filt=None, field="facility__name", qset=None):
"""
Filter queryset of netfac objects related to facilities with name match
in facility__name according to filter
@@ -2218,8 +2286,9 @@ def related_to_name(cls, value=None, filt=None, field="facility__name",
return qset.filter(**make_relation_filter(field, filt, value))
@classmethod
- def related_to_country(cls, value=None, filt=None,
- field="facility__country", qset=None):
+ def related_to_country(
+ cls, value=None, filt=None, field="facility__country", qset=None
+ ):
"""
Filter queryset of netfac objects related to country via match
in facility__country according to filter
@@ -2231,8 +2300,7 @@ def related_to_country(cls, value=None, filt=None,
return qset.filter(**make_relation_filter(field, filt, value))
@classmethod
- def related_to_city(cls, value=None, filt=None, field="facility__city",
- qset=None):
+ def related_to_city(cls, value=None, filt=None, field="facility__city", qset=None):
"""
Filter queryset of netfac objects related to city via match
in facility__city according to filter
@@ -2249,8 +2317,8 @@ def descriptive_name(self):
Returns a descriptive label of the netfac for logging purposes
"""
return "netfac{} AS{} {} <-> {}".format(
- self.id, self.network.asn, self.network.name, self.facility.name)
-
+ self.id, self.network.asn, self.network.name, self.facility.name
+ )
def nsp_has_perms_PUT(self, user, request):
return validate_PUT_ownership(user, self, request.data, ["net"])
@@ -2266,12 +2334,11 @@ class NetworkIXLan(pdb_models.NetworkIXLanBase):
Describes a network relationship to an IX through an IX Lan
"""
- network = models.ForeignKey(Network, default=0,
- related_name='netixlan_set')
- ixlan = models.ForeignKey(IXLan, default=0, related_name='netixlan_set')
+ network = models.ForeignKey(Network, default=0, related_name="netixlan_set")
+ ixlan = models.ForeignKey(IXLan, default=0, related_name="netixlan_set")
class Meta:
- db_table = u'peeringdb_network_ixlan'
+ db_table = u"peeringdb_network_ixlan"
@property
def name(self):
@@ -2283,7 +2350,8 @@ def descriptive_name(self):
Returns a descriptive label of the netixlan for logging purposes
"""
return "netixlan{} AS{} {} {}".format(
- self.id, self.asn, self.ipaddr4, self.ipaddr6)
+ self.id, self.asn, self.ipaddr4, self.ipaddr6
+ )
@property
def ix_name(self):
@@ -2309,8 +2377,7 @@ def nsp_namespace_from_id(cls, org_id, net_id, ixlan_id):
"""
Returns permissioning namespace for a netixlan
"""
- return "%s.ixlan.%s" % (Network.nsp_namespace_from_id(org_id, net_id),
- ixlan_id)
+ return "%s.ixlan.%s" % (Network.nsp_namespace_from_id(org_id, net_id), ixlan_id)
@property
def nsp_namespace(self):
@@ -2318,7 +2385,8 @@ def nsp_namespace(self):
Returns permissioning namespace for this netixlan
"""
return self.__class__.nsp_namespace_from_id(
- self.network.org_id, self.network.id, self.ixlan_id)
+ self.network.org_id, self.network.id, self.ixlan_id
+ )
def nsp_has_perms_PUT(self, user, request):
return validate_PUT_ownership(user, self, request.data, ["net"])
@@ -2341,16 +2409,14 @@ def related_to_ix(cls, value=None, filt=None, field="ix_id", qset=None):
return qset.filter(ixlan_id__in=[i.id for i in q])
@classmethod
- def related_to_name(cls, value=None, filt=None, field="ix__name",
- qset=None):
+ def related_to_name(cls, value=None, filt=None, field="ix__name", qset=None):
"""
Filter queryset of netixlan objects related to exchange via a name match
according to filter
Relationship through ixlan -> ix
"""
- return cls.related_to_ix(value=value, filt=filt, field=field,
- qset=qset)
+ return cls.related_to_ix(value=value, filt=filt, field=field, qset=qset)
def ipaddress_conflict(self):
"""
@@ -2364,15 +2430,16 @@ def ipaddress_conflict(self):
address
"""
- ipv4 = NetworkIXLan.objects.filter(ipaddr4=self.ipaddr4,
- status="ok").exclude(id=self.id)
- ipv6 = NetworkIXLan.objects.filter(ipaddr6=self.ipaddr6,
- status="ok").exclude(id=self.id)
- conflict_v4 = (self.ipaddr4 and ipv4.exists())
- conflict_v6 = (self.ipaddr6 and ipv6.exists())
+ ipv4 = NetworkIXLan.objects.filter(ipaddr4=self.ipaddr4, status="ok").exclude(
+ id=self.id
+ )
+ ipv6 = NetworkIXLan.objects.filter(ipaddr6=self.ipaddr6, status="ok").exclude(
+ id=self.id
+ )
+ conflict_v4 = self.ipaddr4 and ipv4.exists()
+ conflict_v6 = self.ipaddr6 and ipv6.exists()
return (conflict_v4, conflict_v6)
-
def validate_ipaddr4(self):
if self.ipaddr4 and not self.ixlan.test_ipv4_address(self.ipaddr4):
raise ValidationError(_("IPv4 address outside of prefix"))
@@ -2381,7 +2448,6 @@ def validate_ipaddr6(self):
if self.ipaddr6 and not self.ixlan.test_ipv6_address(self.ipaddr6):
raise ValidationError(_("IPv6 address outside of prefix"))
-
def clean(self):
"""
Custom model validation
@@ -2425,55 +2491,63 @@ def ipaddr(self, version):
return self.ipaddr6
raise ValueError("Invalid ip version {}".format(version))
-
def descriptive_name_ipv(self, version):
"""
Returns a descriptive label of the netixlan for logging purposes
Will only contain the ipaddress matching the specified version
"""
- return "netixlan{} AS{} {}".format(
- self.id, self.asn, self.ipaddr(version))
-
+ return "netixlan{} AS{} {}".format(self.id, self.asn, self.ipaddr(version))
class User(AbstractBaseUser, PermissionsMixin):
"""
proper length fields user
"""
+
username = models.CharField(
- _('username'), max_length=254, unique=True,
- help_text=_('Required. Letters, digits and [@.+-/_=|] only.'),
+ _("username"),
+ max_length=254,
+ unique=True,
+ help_text=_("Required. Letters, digits and [@.+-/_=|] only."),
validators=[
- validators.RegexValidator(r'^[\w\.@+-=|/]+$',
- _('Enter a valid username.'), 'invalid',
- flags=re.UNICODE)
- ])
- email = models.EmailField(_('email address'), max_length=254)
- first_name = models.CharField(_('first name'), max_length=254, blank=True)
- last_name = models.CharField(_('last name'), max_length=254, blank=True)
+ validators.RegexValidator(
+ r"^[\w\.@+-=|/]+$",
+ _("Enter a valid username."),
+ "invalid",
+ flags=re.UNICODE,
+ )
+ ],
+ )
+ email = models.EmailField(_("email address"), max_length=254)
+ first_name = models.CharField(_("first name"), max_length=254, blank=True)
+ last_name = models.CharField(_("last name"), max_length=254, blank=True)
is_staff = models.BooleanField(
- _('staff status'), default=False,
- help_text=_('Designates whether the user can log into admin site.'))
+ _("staff status"),
+ default=False,
+ help_text=_("Designates whether the user can log into admin site."),
+ )
is_active = models.BooleanField(
- _('active'), default=True, help_text=
- _('Designates whether this user should be treated as active. Unselect this instead of deleting accounts.'
- ))
- date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
+ _("active"),
+ default=True,
+ help_text=_(
+ "Designates whether this user should be treated as active. Unselect this instead of deleting accounts."
+ ),
+ )
+ date_joined = models.DateTimeField(_("date joined"), default=timezone.now)
created = CreatedDateTimeField()
updated = UpdatedDateTimeField()
- status = models.CharField(_('status'), max_length=254, default="ok")
- locale = models.CharField(
- _('language'), max_length=62, blank=True, null=True)
+ status = models.CharField(_("status"), max_length=254, default="ok")
+ locale = models.CharField(_("language"), max_length=62, blank=True, null=True)
objects = UserManager()
- USERNAME_FIELD = 'username'
- REQUIRED_FIELDS = ['email']
+ USERNAME_FIELD = "username"
+ REQUIRED_FIELDS = ["email"]
class Meta:
db_table = "peeringdb_user"
- verbose_name = _('user')
- verbose_name_plural = _('users')
+ verbose_name = _("user")
+ verbose_name_plural = _("users")
@property
def organizations(self):
@@ -2486,9 +2560,7 @@ def organizations(self):
if m and int(m.group(1)) not in ids:
ids.append(int(m.group(1)))
- return [
- org for org in Organization.objects.filter(id__in=ids, status="ok")
- ]
+ return [org for org in Organization.objects.filter(id__in=ids, status="ok")]
@property
def networks(self):
@@ -2496,8 +2568,8 @@ def networks(self):
Returns all networks this user is a member of
"""
return list(
- chain.from_iterable(
- org.net_set_active.all() for org in self.organizations))
+ chain.from_iterable(org.net_set_active.all() for org in self.organizations)
+ )
@property
def full_name(self):
@@ -2515,8 +2587,7 @@ def email_confirmed(self):
"""
try:
- email = EmailAddress.objects.get(user=self, email=self.email,
- primary=True)
+ email = EmailAddress.objects.get(user=self, email=self.email, primary=True)
except EmailAddress.DoesNotExist:
return False
@@ -2551,25 +2622,25 @@ def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
- full_name = '%s %s' % (self.first_name, self.last_name)
+ full_name = "%s %s" % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name
- def email_user(self, subject, message,
- from_email=settings.DEFAULT_FROM_EMAIL):
+ def email_user(self, subject, message, from_email=settings.DEFAULT_FROM_EMAIL):
"""
Sends an email to this User.
"""
if not getattr(settings, "MAIL_DEBUG", False):
mail = EmailMultiAlternatives(
- subject, message, from_email,
- [self.email], headers={
- "Auto-Submitted": "auto-generated",
- "Return-Path": "<>"
- })
+ subject,
+ message,
+ from_email,
+ [self.email],
+ headers={"Auto-Submitted": "auto-generated", "Return-Path": "<>"},
+ )
mail.send(fail_silently=False)
else:
debug_mail(subject, message, from_email, [self.email])
@@ -2649,7 +2720,7 @@ def password_reset_initiate(self):
Initiate the password reset process for the user
"""
- #pylint: disable=access-member-before-definition
+ # pylint: disable=access-member-before-definition
if self.id:
@@ -2661,16 +2732,20 @@ def password_reset_initiate(self):
token, hashed = password_reset_token()
self.password_reset = UserPasswordReset.objects.create(
- user=self, token=hashed)
+ user=self, token=hashed
+ )
template = loader.get_template("email/password-reset.txt")
with override(self.locale):
self.email_user(
_("Password Reset Initiated"),
- template.render({
- "user": self,
- "token": token,
- "password_reset_url": settings.PASSWORD_RESET_URL
- }))
+ template.render(
+ {
+ "user": self,
+ "token": token,
+ "password_reset_url": settings.PASSWORD_RESET_URL,
+ }
+ ),
+ )
return token, hashed
return None, None
@@ -2713,8 +2788,7 @@ class UserPasswordReset(models.Model):
class Meta:
db_table = "peeringdb_user_password_reset"
- user = models.OneToOneField(User, primary_key=True,
- related_name="password_reset")
+ user = models.OneToOneField(User, primary_key=True, related_name="password_reset")
token = models.CharField(max_length=255)
created = models.DateTimeField(auto_now_add=True)
@@ -2733,27 +2807,36 @@ class CommandLineTool(models.Model):
Describes command line tool execution by a staff user inside the
control panel (admin)
"""
- tool = models.CharField(max_length=255, help_text=_("name of the tool"),
- choices=COMMANDLINE_TOOLS)
+
+ tool = models.CharField(
+ max_length=255, help_text=_("name of the tool"), choices=COMMANDLINE_TOOLS
+ )
arguments = models.TextField(
- help_text=_("json serialization of arguments and options passed"))
+ help_text=_("json serialization of arguments and options passed")
+ )
result = models.TextField(null=True, blank=True, help_text=_("result log"))
description = models.CharField(
max_length=255,
help_text=_("Descriptive text of command that can be searched"),
- null=True, blank=True)
- user = models.ForeignKey(User,
- help_text=_("the user that ran this command"),
- related_name="clt_history")
+ null=True,
+ blank=True,
+ )
+ user = models.ForeignKey(
+ User, help_text=_("the user that ran this command"), related_name="clt_history"
+ )
created = models.DateTimeField(
- auto_now_add=True,
- help_text=_("command was run at this date and time"))
+ auto_now_add=True, help_text=_("command was run at this date and time")
+ )
- status = models.CharField(max_length=255, default="done", choices=[
- ("done", _("Done")),
- ("waiting", _("Waiting")),
- ("running", _("Running"))
- ])
+ status = models.CharField(
+ max_length=255,
+ default="done",
+ choices=[
+ ("done", _("Done")),
+ ("waiting", _("Waiting")),
+ ("running", _("Running")),
+ ],
+ )
def __str__(self):
return "{}: {}".format(self.tool, self.description)
@@ -2768,13 +2851,23 @@ def set_running(self):
self.status = "running"
-
-REFTAG_MAP = dict([(cls.handleref.tag, cls)
- for cls in [
- Organization, Network, Facility, InternetExchange,
- InternetExchangeFacility, NetworkFacility, NetworkIXLan,
- NetworkContact, IXLan, IXLanPrefix
- ]])
+REFTAG_MAP = dict(
+ [
+ (cls.handleref.tag, cls)
+ for cls in [
+ Organization,
+ Network,
+ Facility,
+ InternetExchange,
+ InternetExchangeFacility,
+ NetworkFacility,
+ NetworkIXLan,
+ NetworkContact,
+ IXLan,
+ IXLanPrefix,
+ ]
+ ]
+)
QUEUE_ENABLED = []
diff --git a/peeringdb_server/org_admin_views.py b/peeringdb_server/org_admin_views.py
index 319322ef..c248ed75 100644
--- a/peeringdb_server/org_admin_views.py
+++ b/peeringdb_server/org_admin_views.py
@@ -9,8 +9,14 @@
from forms import OrgAdminUserPermissionForm
from peeringdb_server.models import (
- User, Organization, Network, NetworkContact, InternetExchange, Facility,
- UserOrgAffiliationRequest)
+ User,
+ Organization,
+ Network,
+ NetworkContact,
+ InternetExchange,
+ Facility,
+ UserOrgAffiliationRequest,
+)
import django_namespace_perms.util as nsp
from django_namespace_perms.constants import *
@@ -31,8 +37,7 @@ def save_user_permissions(org, user, perms):
# wipe all the user's perms for the targeted org
- user.userpermission_set.filter(
- namespace__startswith=org.nsp_namespace).delete()
+ user.userpermission_set.filter(namespace__startswith=org.nsp_namespace).delete()
# collect permissioning namespaces from the provided permissioning ids
@@ -45,32 +50,37 @@ def save_user_permissions(org, user, perms):
if id == "org.%d" % org.id:
nsp_perms[org.nsp_namespace] = permissions
- nsp_perms[NetworkContact.nsp_namespace_from_id(
- org.id, "*", "private")] = permissions
+ nsp_perms[
+ NetworkContact.nsp_namespace_from_id(org.id, "*", "private")
+ ] = permissions
elif id == "net":
- nsp_perms[Network.nsp_namespace_from_id(
- org.id, "*").strip(".*")] = permissions
- nsp_perms[NetworkContact.nsp_namespace_from_id(
- org.id, "*", "private")] = permissions
+ nsp_perms[
+ Network.nsp_namespace_from_id(org.id, "*").strip(".*")
+ ] = permissions
+ nsp_perms[
+ NetworkContact.nsp_namespace_from_id(org.id, "*", "private")
+ ] = permissions
elif id == "ix":
- nsp_perms[InternetExchange.nsp_namespace_from_id(
- org.id, "*").strip(".*")] = permissions
+ nsp_perms[
+ InternetExchange.nsp_namespace_from_id(org.id, "*").strip(".*")
+ ] = permissions
elif id == "fac":
- nsp_perms[Facility.nsp_namespace_from_id(
- org.id, "*").strip(".*")] = permissions
+ nsp_perms[
+ Facility.nsp_namespace_from_id(org.id, "*").strip(".*")
+ ] = permissions
elif id.find(".") > -1:
id = id.split(".")
if id[0] == "net":
- nsp_perms[Network.nsp_namespace_from_id(org.id,
- id[1])] = permissions
- nsp_perms[NetworkContact.nsp_namespace_from_id(
- org.id, id[1], "private")] = permissions
+ nsp_perms[Network.nsp_namespace_from_id(org.id, id[1])] = permissions
+ nsp_perms[
+ NetworkContact.nsp_namespace_from_id(org.id, id[1], "private")
+ ] = permissions
elif id[0] == "ix":
- nsp_perms[InternetExchange.nsp_namespace_from_id(
- org.id, id[1])] = permissions
+ nsp_perms[
+ InternetExchange.nsp_namespace_from_id(org.id, id[1])
+ ] = permissions
elif id[0] == "fac":
- nsp_perms[Facility.nsp_namespace_from_id(org.id,
- id[1])] = permissions
+ nsp_perms[Facility.nsp_namespace_from_id(org.id, id[1])] = permissions
# save
for ns, p in nsp_perms.items():
@@ -91,7 +101,7 @@ def load_all_user_permissions(org):
rv[user.id] = {
"id": user.id,
"perms": perms,
- "name": "%s <%s> %s" % (user.full_name, user.email, user.username)
+ "name": "%s <%s> %s" % (user.full_name, user.email, user.username),
}
return rv
@@ -102,9 +112,14 @@ def load_user_permissions(org, user):
"""
# load all of the user's permissions related to this org
- uperms = dict([(p.namespace, p.permissions)
- for p in user.userpermission_set.filter(
- namespace__startswith=org.nsp_namespace)])
+ uperms = dict(
+ [
+ (p.namespace, p.permissions)
+ for p in user.userpermission_set.filter(
+ namespace__startswith=org.nsp_namespace
+ )
+ ]
+ )
perms = {}
@@ -140,23 +155,41 @@ def permission_ids(org):
"org.%d" % org.id: _("Organization and all Entities it owns"),
"net": _("Any Network"),
"fac": _("Any Facility"),
- "ix": _("Any Exchange")
+ "ix": _("Any Exchange"),
}
perms.update(
- dict([("net.%d" % net.id, _("Network - %(net_name)s") % {
- 'net_name': net.name
- }) for net in org.net_set_active]))
+ dict(
+ [
+ (
+ "net.%d" % net.id,
+ _("Network - %(net_name)s") % {"net_name": net.name},
+ )
+ for net in org.net_set_active
+ ]
+ )
+ )
perms.update(
- dict([("ix.%d" % ix.id, _("Exchange - %(ix_name)s") % {
- 'ix_name': ix.name
- }) for ix in org.ix_set_active]))
+ dict(
+ [
+ ("ix.%d" % ix.id, _("Exchange - %(ix_name)s") % {"ix_name": ix.name})
+ for ix in org.ix_set_active
+ ]
+ )
+ )
perms.update(
- dict([("fac.%d" % fac.id, _("Facility - %(fac_name)s") % {
- 'fac_name': fac.name
- }) for fac in org.fac_set_active]))
+ dict(
+ [
+ (
+ "fac.%d" % fac.id,
+ _("Facility - %(fac_name)s") % {"fac_name": fac.name},
+ )
+ for fac in org.fac_set_active
+ ]
+ )
+ )
return perms
@@ -206,15 +239,14 @@ def callback(request, **kwargs):
try:
org = Organization.objects.get(id=org_id)
- if not nsp.has_perms(request.user, org.nsp_namespace_manage,
- "update"):
+ if not nsp.has_perms(request.user, org.nsp_namespace_manage, "update"):
return JsonResponse({}, status=403)
kwargs["org"] = org
return fnc(request, **kwargs)
except Organization.DoesNotExist:
- return JsonResponse({
- "non_field_errors": [_("Invalid organization specified")]
- }, status=400)
+ return JsonResponse(
+ {"non_field_errors": [_("Invalid organization specified")]}, status=400
+ )
return callback
@@ -261,10 +293,13 @@ def users(request, **kwargs):
org = kwargs.get("org")
rv = {
- "users": [{
- "id": user.id,
- "name": "%s <%s, %s>" % (user.full_name, user.email, user.username)
- } for user in org.usergroup.user_set.all()]
+ "users": [
+ {
+ "id": user.id,
+ "name": "%s <%s, %s>" % (user.full_name, user.email, user.username),
+ }
+ for user in org.usergroup.user_set.all()
+ ]
}
rv.update({"status": "ok"})
@@ -305,9 +340,7 @@ def manage_user_update(request, **kwargs):
user = kwargs.get("user")
group = request.POST.get("group")
if group not in ["member", "admin"]:
- return JsonResponse({
- "group": _("Needs to be member or admin")
- }, status=400)
+ return JsonResponse({"group": _("Needs to be member or admin")}, status=400)
if group == "admin":
org.usergroup.user_set.remove(user)
@@ -407,10 +440,7 @@ def permissions(request, **kwargs):
org = kwargs.get("org")
- perms = [{
- "id": id,
- "name": name
- } for id, name in permission_ids(org).items()]
+ perms = [{"id": id, "name": name} for id, name in permission_ids(org).items()]
perms = sorted(perms, key=lambda x: x.get("name"))
return JsonResponse({"status": "ok", "permissions": perms})
@@ -445,23 +475,28 @@ def uoar_approve(request, **kwargs):
if admin_user != request.user:
with override(admin_user.locale):
admin_user.email_user(
- _("%(user_name)s's afilliation request has been approved"
- ) % {'user_name': uoar.user.full_name},
+ _("%(user_name)s's affiliation request has been approved")
+ % {"user_name": uoar.user.full_name},
loader.get_template(
- 'email/notify-org-admin-user-affil-approved.txt')
- .render({
- "user": request.user,
- "uoar": uoar,
- "org_management_url": '%s/org/%d#users' %
- (settings.BASE_URL, org.id)
- }))
-
- return JsonResponse({
- "status": "ok",
- "full_name": user.full_name,
- "id": user.id,
- "email": user.email
- })
+ "email/notify-org-admin-user-affil-approved.txt"
+ ).render(
+ {
+ "user": request.user,
+ "uoar": uoar,
+ "org_management_url": "%s/org/%d#users"
+ % (settings.BASE_URL, org.id),
+ }
+ ),
+ )
+
+ return JsonResponse(
+ {
+ "status": "ok",
+ "full_name": user.full_name,
+ "id": user.id,
+ "email": user.email,
+ }
+ )
except UserOrgAffiliationRequest.DoesNotExist:
return JsonResponse({"status": "ok"})
@@ -499,16 +534,19 @@ def uoar_deny(request, **kwargs):
if user != request.user:
with override(user.locale):
user.email_user(
- _("%(user_name)s's afilliation request has been denied"
- ) % {'user_name': uoar.user.full_name},
+ _("%(user_name)s's affiliation request has been denied")
+ % {"user_name": uoar.user.full_name},
loader.get_template(
- 'email/notify-org-admin-user-affil-denied.txt')
- .render({
- "user": request.user,
- "uoar": uoar,
- "org_management_url": '%s/org/%d#users' %
- (settings.BASE_URL, org.id)
- }))
+ "email/notify-org-admin-user-affil-denied.txt"
+ ).render(
+ {
+ "user": request.user,
+ "uoar": uoar,
+ "org_management_url": "%s/org/%d#users"
+ % (settings.BASE_URL, org.id),
+ }
+ ),
+ )
except UserOrgAffiliationRequest.DoesNotExist:
return JsonResponse({"status": "ok"})
diff --git a/peeringdb_server/renderers.py b/peeringdb_server/renderers.py
index 9fb1a564..1c9c49f1 100644
--- a/peeringdb_server/renderers.py
+++ b/peeringdb_server/renderers.py
@@ -30,15 +30,15 @@ def default(self, obj):
class MungeRenderer(renderers.BaseRenderer):
- media_type = 'text/plain'
- format = 'txt'
- charset = 'iso-8859-1'
+ media_type = "text/plain"
+ format = "txt"
+ charset = "iso-8859-1"
def render(self, data, media_type=None, renderer_context=None):
# TODO use munge:
indent = None
- if 'request' in renderer_context:
- request = renderer_context.get('request')
+ if "request" in renderer_context:
+ request = renderer_context.get("request")
if "pretty" in request.GET:
indent = 2
return json.dumps(data, cls=JSONEncoder, indent=indent)
@@ -49,6 +49,7 @@ class MetaJSONRenderer(MungeRenderer):
Renderer which serializes to JSON.
Does *not* apply JSON's character escaping for non-ascii characters.
"""
+
ensure_ascii = False
media_type = "application/json"
@@ -64,19 +65,19 @@ def render(self, data, accepted_media_type=None, renderer_context=None):
result = {}
- if '__meta' in data:
- meta = data.pop('__meta')
+ if "__meta" in data:
+ meta = data.pop("__meta")
else:
meta = dict()
- if 'request' in renderer_context:
- request = renderer_context.get('request')
- meta.update(getattr(request, 'meta_response', {}))
+ if "request" in renderer_context:
+ request = renderer_context.get("request")
+ meta.update(getattr(request, "meta_response", {}))
- res = renderer_context['response']
+ res = renderer_context["response"]
if res.status_code < 400:
- if 'results' in data:
- result['data'] = data.pop('results')
+ if "results" in data:
+ result["data"] = data.pop("results")
elif data:
if isinstance(data, dict):
result["data"] = [data]
@@ -86,10 +87,11 @@ def render(self, data, accepted_media_type=None, renderer_context=None):
result["data"] = []
elif res.status_code < 500:
- meta['error'] = data.pop('detail', 'Unknown')
+ meta["error"] = data.pop("detail", "Unknown")
result.update(**data)
- result['meta'] = meta
+ result["meta"] = meta
- return super(self.__class__, self).render(result, accepted_media_type,
- renderer_context)
+ return super(self.__class__, self).render(
+ result, accepted_media_type, renderer_context
+ )
diff --git a/peeringdb_server/rest.py b/peeringdb_server/rest.py
index f572eac6..9bd76b36 100644
--- a/peeringdb_server/rest.py
+++ b/peeringdb_server/rest.py
@@ -2,7 +2,7 @@
import unidecode
-from rest_framework import (routers, serializers, status, viewsets)
+from rest_framework import routers, serializers, status, viewsets
from rest_framework.response import Response
from rest_framework.views import exception_handler
from rest_framework.exceptions import ValidationError as RestValidationError
@@ -15,7 +15,7 @@
from django.db.models import DateTimeField
import django_namespace_perms.rest as nsp_rest
-from peeringdb_server.models import (Network)
+from peeringdb_server.models import Network
from peeringdb_server.serializers import ParentStatusException
from peeringdb_server.api_cache import CacheRedirect, APICacheLoader
@@ -38,32 +38,42 @@ class RestRouter(routers.DefaultRouter):
routes = [
# List route.
- routers.Route(url=r'^{prefix}{trailing_slash}$', mapping={
- 'get': 'list',
- 'post': 'create'
- }, name='{basename}-list', initkwargs={'suffix': 'List'}),
+ routers.Route(
+ url=r"^{prefix}{trailing_slash}$",
+ mapping={"get": "list", "post": "create"},
+ name="{basename}-list",
+ initkwargs={"suffix": "List"},
+ ),
# Detail route.
routers.Route(
- url=r'^{prefix}/{lookup}{trailing_slash}$', mapping={
- 'get': 'retrieve',
- 'put': 'update',
- 'patch': 'partial_update',
- 'delete': 'destroy'
- }, name='{basename}-detail', initkwargs={'suffix': 'Instance'}),
+ url=r"^{prefix}/{lookup}{trailing_slash}$",
+ mapping={
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ },
+ name="{basename}-detail",
+ initkwargs={"suffix": "Instance"},
+ ),
routers.DynamicDetailRoute(
- url=r'^{prefix}/{lookup}/{methodnamehyphen}$',
- name='{basename}-{methodnamehyphen}', initkwargs={}),
+ url=r"^{prefix}/{lookup}/{methodnamehyphen}$",
+ name="{basename}-{methodnamehyphen}",
+ initkwargs={},
+ ),
# Dynamically generated routes.
# Generated using @action or @link decorators on methods of the
# viewset.
- routers.Route(url=r'^{prefix}/{lookup}/{methodname}{trailing_slash}$',
- mapping={
- '{httpmethod}': '{methodname}',
- }, name='{basename}-{methodnamehyphen}', initkwargs={}),
+ routers.Route(
+ url=r"^{prefix}/{lookup}/{methodname}{trailing_slash}$",
+ mapping={"{httpmethod}": "{methodname}",},
+ name="{basename}-{methodnamehyphen}",
+ initkwargs={},
+ ),
]
def __init__(self, trailing_slash=False):
- self.trailing_slash = trailing_slash and '/' or ''
+ self.trailing_slash = trailing_slash and "/" or ""
super(routers.DefaultRouter, self).__init__(trailing_slash=False)
@@ -100,8 +110,9 @@ def wrapped(self, request, *args, **kwargs):
try:
compat_check(request)
except ValueError as exc:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": str(exc)})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": str(exc)}
+ )
return fn(self, request, *args, **kwargs)
@@ -114,7 +125,7 @@ def version_tuple(self, str_version):
def version_pad(self, version):
""" take a semantic version tuple and zero pad to dev version """
while len(version) < 4:
- version = version + (0, )
+ version = version + (0,)
return version
def version_string(self, version):
@@ -155,8 +166,8 @@ def client_info(self, request):
"client": self.version_tuple(m.group(1)),
"backend": {
"name": m.group(2),
- "version": self.version_tuple(m.group(3))
- }
+ "version": self.version_tuple(m.group(3)),
+ },
}
return {}
@@ -184,32 +195,30 @@ def compat_check(self, request):
client_version = info.get("client")
backend_version = info.get("backend").get("version")
- if self.version_pad(
- self.min_version) > self.version_pad(client_version):
+ if self.version_pad(self.min_version) > self.version_pad(client_version):
# client version is too low
compat = False
- elif self.version_pad(
- self.max_version) < self.version_pad(client_version):
+ elif self.version_pad(self.max_version) < self.version_pad(client_version):
# client version is too high
compat = False
- if self.version_pad(backend_min) > self.version_pad(
- backend_version):
+ if self.version_pad(backend_min) > self.version_pad(backend_version):
# client backend version is too low
compat = False
- elif self.version_pad(backend_max) < self.version_pad(
- backend_version):
+ elif self.version_pad(backend_max) < self.version_pad(backend_version):
# client backend version is too high
compat = False
if not compat:
raise ValueError(
- "Your client version is incompatible with server version of the api, please install peeringdb>={},<={} {}>={},<={}"
- .format(
+ "Your client version is incompatible with server version of the api, please install peeringdb>={},<={} {}>={},<={}".format(
self.version_string(self.min_version),
- self.version_string(self.max_version), backend,
+ self.version_string(self.max_version),
+ backend,
self.version_string(backend_min),
- self.version_string(backend_max)))
+ self.version_string(backend_max),
+ )
+ )
###############################################################################
@@ -222,12 +231,13 @@ class ModelViewSet(viewsets.ModelViewSet):
This should probably be moved to a common lib ?
Ueaj
"""
- paginate_by_param = 'limit',
+
+ paginate_by_param = ("limit",)
# use django namespace permissions backend, this is also specified in the
# settings but for some reason it only works when explicitly set here,
# need to investigate
- permission_classes = (nsp_rest.BasePermission, )
+ permission_classes = (nsp_rest.BasePermission,)
def get_queryset(self):
"""
@@ -241,7 +251,8 @@ def get_queryset(self):
if hasattr(self.serializer_class, "prepare_query"):
try:
qset, p_filters = self.serializer_class.prepare_query(
- qset, **self.request.query_params)
+ qset, **self.request.query_params
+ )
except ValidationError, inst:
raise RestValidationError({"detail": str(inst[0])})
except ValueError, inst:
@@ -255,33 +266,28 @@ def get_queryset(self):
p_filters = {}
try:
- since = int(float(self.request.query_params.get('since', 0)))
+ since = int(float(self.request.query_params.get("since", 0)))
except ValueError:
- raise RestValidationError({
- "detail": "'since' needs to be a unix timestamp (epoch seconds)"
- })
+ raise RestValidationError(
+ {"detail": "'since' needs to be a unix timestamp (epoch seconds)"}
+ )
try:
- skip = int(self.request.query_params.get('skip', 0))
+ skip = int(self.request.query_params.get("skip", 0))
except ValueError:
- raise RestValidationError({
- "detail": "'skip' needs to be a number"
- })
+ raise RestValidationError({"detail": "'skip' needs to be a number"})
try:
- limit = int(self.request.query_params.get('limit', 0))
+ limit = int(self.request.query_params.get("limit", 0))
except ValueError:
- raise RestValidationError({
- "detail": "'limit' needs to be a number"
- })
+ raise RestValidationError({"detail": "'limit' needs to be a number"})
try:
depth = int(self.request.query_params.get("depth", 0))
except ValueError:
- raise RestValidationError({
- "detail": "'depth' needs to be a number"
- })
+ raise RestValidationError({"detail": "'depth' needs to be a number"})
- field_names = [fld.name for fld in self.model._meta.get_fields()
- ] + self.serializer_class.queryable_relations()
+ field_names = [
+ fld.name for fld in self.model._meta.get_fields()
+ ] + self.serializer_class.queryable_relations()
date_fields = ["DateTimeField", "DateField"]
@@ -337,9 +343,7 @@ def get_queryset(self):
if timezone.is_naive(v):
v = timezone.make_aware(v)
if "_ctf" in self.request.query_params:
- self.request._ctf = {
- "%s__%s" % (m.group(1), m.group(2)): v
- }
+ self.request._ctf = {"%s__%s" % (m.group(1), m.group(2)): v}
# contains should become icontains because we always
# want it to do case-insensitive checks
@@ -387,9 +391,11 @@ def get_queryset(self):
if not self.kwargs:
if since > 0:
# .filter(status__in=["ok","deleted"])
- qset = qset.since(timestamp=since,
- deleted=True).order_by("updated").filter(
- status__in=["ok", "deleted"])
+ qset = (
+ qset.since(timestamp=since, deleted=True)
+ .order_by("updated")
+ .filter(status__in=["ok", "deleted"])
+ )
else:
qset = qset.filter(status="ok")
else:
@@ -397,7 +403,7 @@ def get_queryset(self):
if not self.kwargs:
if limit > 0:
- qset = qset[skip:skip + limit]
+ qset = qset[skip : skip + limit]
else:
qset = qset[skip:]
@@ -405,13 +411,15 @@ def get_queryset(self):
row_count = qset.count()
if adrl and depth > 0 and row_count > adrl:
qset = qset[:adrl]
- self.request.meta_response[
- "truncated"] = "Your search query (with depth %d) returned more than %d rows and has been truncated. Please be more specific in your filters, use the limit and skip parameters to page through the resultset or drop the depth parameter" % (
- depth, adrl)
+ self.request.meta_response["truncated"] = (
+ "Your search query (with depth %d) returned more than %d rows and has been truncated. Please be more specific in your filters, use the limit and skip parameters to page through the resultset or drop the depth parameter"
+ % (depth, adrl)
+ )
if depth > 0 or self.kwargs:
return self.serializer_class.prefetch_related(
- qset, self.request, is_list=(len(self.kwargs) == 0))
+ qset, self.request, is_list=(len(self.kwargs) == 0)
+ )
else:
return qset
@@ -542,24 +550,24 @@ def list(self, request, *args, **kwargs):
try:
r = super(ModelViewSet, self).list(request, *args, **kwargs)
except ValueError, inst:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": str(inst)})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": str(inst)}
+ )
except TypeError, inst:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": str(inst)})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": str(inst)}
+ )
except CacheRedirect, inst:
r = Response(status=200, data=inst.loader.load())
d = time.time() - t
print "done in %.5f seconds, %d queries" % (d, len(connection.queries))
- #FIXME: this waits for peeringdb-py fix to deal with 404 raise properly
+ # FIXME: this waits for peeringdb-py fix to deal with 404 raise properly
if not r or not len(r.data):
if self.serializer_class.is_unique_query(request):
return Response(
- status=404, data={
- "data": [],
- "detail": "Entity not found"
- })
+ status=404, data={"data": [], "detail": "Entity not found"}
+ )
return r
@@ -585,13 +593,13 @@ def create(self, request, *args, **kwargs):
with reversion.create_revision():
if request.user:
reversion.set_user(request.user)
- return super(ModelViewSet, self).create(
- request, *args, **kwargs)
+ return super(ModelViewSet, self).create(request, *args, **kwargs)
except PermissionDenied, inst:
return Response(status=status.HTTP_403_FORBIDDEN)
except ParentStatusException, inst:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": str(inst)})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": str(inst)}
+ )
finally:
self.get_serializer().finalize_create(request)
@@ -605,14 +613,15 @@ def update(self, request, *args, **kwargs):
if request.user:
reversion.set_user(request.user)
- return super(ModelViewSet, self).update(
- request, *args, **kwargs)
+ return super(ModelViewSet, self).update(request, *args, **kwargs)
except TypeError, inst:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": str(inst)})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": str(inst)}
+ )
except ValueError, inst:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": str(inst)})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": str(inst)}
+ )
finally:
self.get_serializer().finalize_update(request)
@@ -631,8 +640,9 @@ def destroy(self, request, pk, format=None):
try:
obj = self.model.objects.get(pk=pk)
except ValueError:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"extra": "Invalid id"})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"extra": "Invalid id"}
+ )
except self.model.DoesNotExist:
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -648,7 +658,7 @@ def destroy(self, request, pk, format=None):
self.get_serializer().finalize_delete(request)
-pdb_serializers = importlib.import_module('peeringdb_server.serializers')
+pdb_serializers = importlib.import_module("peeringdb_server.serializers")
router = RestRouter(trailing_slash=False)
# router helpers
@@ -664,19 +674,19 @@ def model_view_set(model):
"""
# lookup Serializer class
- scls = getattr(pdb_serializers, model + 'Serializer')
+ scls = getattr(pdb_serializers, model + "Serializer")
- model_t = apps.get_model('peeringdb_server', model)
+ model_t = apps.get_model("peeringdb_server", model)
# setup class attributes
clsdict = {
- 'model': model_t,
- 'serializer_class': scls,
- '__doc__': "Rest API endpoint for " + model,
+ "model": model_t,
+ "serializer_class": scls,
+ "__doc__": "Rest API endpoint for " + model,
}
# create the type
- viewset_t = type(model + 'ViewSet', (ModelViewSet, ), clsdict)
+ viewset_t = type(model + "ViewSet", (ModelViewSet,), clsdict)
# register with the rest router for incoming requests
ref_tag = model_t.handleref.tag
@@ -685,20 +695,19 @@ def model_view_set(model):
return viewset_t
-FacilityViewSet = model_view_set('Facility')
-InternetExchangeViewSet = model_view_set('InternetExchange')
-InternetExchangeFacilityViewSet = model_view_set('InternetExchangeFacility')
-IXLanViewSet = model_view_set('IXLan')
-IXLanPrefixViewSet = model_view_set('IXLanPrefix')
-NetworkViewSet = model_view_set('Network')
-NetworkContactViewSet = model_view_set('NetworkContact')
-NetworkFacilityViewSet = model_view_set('NetworkFacility')
-NetworkIXLanViewSet = model_view_set('NetworkIXLan')
-OrganizationViewSet = model_view_set('Organization')
+FacilityViewSet = model_view_set("Facility")
+InternetExchangeViewSet = model_view_set("InternetExchange")
+InternetExchangeFacilityViewSet = model_view_set("InternetExchangeFacility")
+IXLanViewSet = model_view_set("IXLan")
+IXLanPrefixViewSet = model_view_set("IXLanPrefix")
+NetworkViewSet = model_view_set("Network")
+NetworkContactViewSet = model_view_set("NetworkContact")
+NetworkFacilityViewSet = model_view_set("NetworkFacility")
+NetworkIXLanViewSet = model_view_set("NetworkIXLan")
+OrganizationViewSet = model_view_set("Organization")
class ReadOnlyMixin(object):
-
def destroy(self, request, pk, format=None):
"""
This endpoint is readonly
@@ -724,7 +733,6 @@ def patch(self, request, *args, **kwargs):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
-
class ASSetViewSet(ReadOnlyMixin, viewsets.ModelViewSet):
"""
AS-SET endpoint
@@ -746,22 +754,33 @@ def retrieve(self, request, asn):
try:
network = Network.objects.get(asn=int(asn))
except ValueError:
- return Response(status=status.HTTP_400_BAD_REQUEST,
- data={"detail": "Invalid ASN"})
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST, data={"detail": "Invalid ASN"}
+ )
except ObjectDoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
- return Response({network.asn : network.irr_as_set})
+ return Response({network.asn: network.irr_as_set})
-
-router.register('as_set', ASSetViewSet, base_name='as_set')
+router.register("as_set", ASSetViewSet, base_name="as_set")
# set here in case we want to add more urls later
urls = router.urls
-REFTAG_MAP = dict([(cls.model.handleref.tag, cls) for cls in [
- OrganizationViewSet, NetworkViewSet, FacilityViewSet,
- InternetExchangeViewSet, InternetExchangeFacilityViewSet,
- NetworkFacilityViewSet, NetworkIXLanViewSet, NetworkContactViewSet,
- IXLanViewSet, IXLanPrefixViewSet
-]])
+REFTAG_MAP = dict(
+ [
+ (cls.model.handleref.tag, cls)
+ for cls in [
+ OrganizationViewSet,
+ NetworkViewSet,
+ FacilityViewSet,
+ InternetExchangeViewSet,
+ InternetExchangeFacilityViewSet,
+ NetworkFacilityViewSet,
+ NetworkIXLanViewSet,
+ NetworkContactViewSet,
+ IXLanViewSet,
+ IXLanPrefixViewSet,
+ ]
+ ]
+)
diff --git a/peeringdb_server/search.py b/peeringdb_server/search.py
index b43d4708..d897ba92 100644
--- a/peeringdb_server/search.py
+++ b/peeringdb_server/search.py
@@ -1,15 +1,17 @@
from django.db.models.signals import post_save, pre_delete
from django.db.models import Q
import peeringdb_server.rest
-from peeringdb_server.models import (UTC, InternetExchange, Network, Facility)
+from peeringdb_server.models import UTC, InternetExchange, Network, Facility
import re
import time
import datetime
import unidecode
+
def unaccent(v):
return unidecode.unidecode(v).lower()
+
# SEARCH INDEX BE STORED HERE
SEARCH_CACHE = {"search_index": {}, "time": 0}
@@ -27,7 +29,7 @@ def hook_save(sender, **kwargs):
if tag not in idx:
idx[tag] = {}
idx.get(tag)[obj.id] = obj
-# print "%d %s refreshed in search index" % (obj.id, tag)
+ # print "%d %s refreshed in search index" % (obj.id, tag)
else:
try:
del idx[tag][obj.id]
@@ -65,7 +67,7 @@ def search(term):
Returns result dict
"""
- search_tags = ('fac', 'ix', 'net')
+ search_tags = ("fac", "ix", "net")
ref_dict = peeringdb_server.rest.ref_dict()
t = time.time()
@@ -73,24 +75,24 @@ def search(term):
# whole db takes 5ish seconds, too slow to cache inline here
search_index = {
- tag:
- {obj.id: obj
- for obj in model.objects.filter(status__in=["ok"])}
- for tag, model in ref_dict.items() if tag in search_tags
+ tag: {obj.id: obj for obj in model.objects.filter(status__in=["ok"])}
+ for tag, model in ref_dict.items()
+ if tag in search_tags
}
for typ, stor in search_index.items():
print "CACHED: %d items in %s" % (len(stor), typ)
- tag_id_re = re.compile('(' + "|".join(search_tags) + '|asn|as)(\d+)')
+ tag_id_re = re.compile("(" + "|".join(search_tags) + "|asn|as)(\d+)")
# FIXME: for now lets force a flush every 120 seconds, might want to look
# at an event based update solution instead
- SEARCH_CACHE.update(search_index=search_index, time=t, update_t=t,
- tag_id_re=tag_id_re)
+ SEARCH_CACHE.update(
+ search_index=search_index, time=t, update_t=t, tag_id_re=tag_id_re
+ )
else:
- search_index = SEARCH_CACHE.get('search_index')
- tag_id_re = SEARCH_CACHE.get('tag_id_re')
+ search_index = SEARCH_CACHE.get("search_index")
+ tag_id_re = SEARCH_CACHE.get("tag_id_re")
# while we are using signals to make sure that the search index gets updated whenever
# a model is saved, right now we still have updates from external sources
@@ -110,16 +112,17 @@ def search(term):
tag: {
obj.id: obj
for obj in model.objects.filter(
- Q(created__gte=dut)
- | Q(updated__gte=dut)).filter(status="ok")
+ Q(created__gte=dut) | Q(updated__gte=dut)
+ ).filter(status="ok")
}
- for tag, model in ref_dict.items() if tag in search_tags
+ for tag, model in ref_dict.items()
+ if tag in search_tags
}
for tag, objects in search_index_update.items():
if tag not in SEARCH_CACHE["search_index"]:
SEARCH_CACHE["search_index"][tag] = dict(
- [(obj.id, obj)
- for obj in ref_dict[tag].objects.filter(status="ok")])
+ [(obj.id, obj) for obj in ref_dict[tag].objects.filter(status="ok")]
+ )
SEARCH_CACHE["search_index"][tag].update(objects)
SEARCH_CACHE["update_t"] = t
@@ -127,8 +130,8 @@ def search(term):
# FIXME: for some reason this gets unset sometimes - need to figure out
# why - for now just recreate when its missing
if not tag_id_re:
- tag_id_re = re.compile('(' + "|".join(search_tags) + '|asn|as)(\d+)')
- SEARCH_CACHE['tag_id_re'] = tag_id_re
+ tag_id_re = re.compile("(" + "|".join(search_tags) + "|asn|as)(\d+)")
+ SEARCH_CACHE["tag_id_re"] = tag_id_re
print "Search index retrieval took %.5f seconds" % (time.time() - t)
@@ -139,7 +142,7 @@ def search(term):
# try to convert to int for numeric search matching
typed_q = {}
try:
- typed_q['int'] = int(term)
+ typed_q["int"] = int(term)
except ValueError:
pass
@@ -158,52 +161,49 @@ def search(term):
for tag, index in search_index.items():
for id, data in index.items():
if unaccent(data.name).find(term) > -1:
- result[tag].append({
- "id": id,
- "name": data.search_result_name,
- "org_id": data.org_id
- })
+ result[tag].append(
+ {"id": id, "name": data.search_result_name, "org_id": data.org_id}
+ )
continue
- if hasattr(data,
- 'name_long') and unaccent(data.name_long).find(term) > -1:
- result[tag].append({
- "id": id,
- "name": data.search_result_name,
- "org_id" : data.org_id
- })
+ if hasattr(data, "name_long") and unaccent(data.name_long).find(term) > -1:
+ result[tag].append(
+ {"id": id, "name": data.search_result_name, "org_id": data.org_id}
+ )
continue
- if hasattr(data, 'aka') and unaccent(data.aka).find(term) > -1:
- result[tag].append({
- "id": id,
- "name": data.search_result_name,
- "org_id": data.org_id
- })
+ if hasattr(data, "aka") and unaccent(data.aka).find(term) > -1:
+ result[tag].append(
+ {"id": id, "name": data.search_result_name, "org_id": data.org_id}
+ )
continue
if typed_q:
if tag in typed_q:
if str(data.id).startswith(typed_q[tag]):
- result[tag].append({
- "id": id,
- "name": data.search_result_name,
- "org_id": data.org_id
- })
+ result[tag].append(
+ {
+ "id": id,
+ "name": data.search_result_name,
+ "org_id": data.org_id,
+ }
+ )
continue
# search asn on everyting? probably just if asn in search
# fields
- if hasattr(data, 'asn'):
- asn = typed_q.get('as',
- typed_q.get('asn',
- str(typed_q.get('int', ''))))
+ if hasattr(data, "asn"):
+ asn = typed_q.get(
+ "as", typed_q.get("asn", str(typed_q.get("int", "")))
+ )
if asn and str(data.asn).startswith(asn):
- result[tag].append({
- "id": id,
- "name": data.search_result_name,
- "org_id": data.org_id
- })
+ result[tag].append(
+ {
+ "id": id,
+ "name": data.search_result_name,
+ "org_id": data.org_id,
+ }
+ )
for k, items in result.items():
result[k] = sorted(items, key=lambda row: row.get("name"))
diff --git a/peeringdb_server/serializers.py b/peeringdb_server/serializers.py
index 06a19f68..338a8fac 100644
--- a/peeringdb_server/serializers.py
+++ b/peeringdb_server/serializers.py
@@ -7,10 +7,14 @@
from django.db.models.query import QuerySet
from django.db.models import Prefetch, Q, Sum, IntegerField, Case, When
from django.db import models, transaction
-from django.db.models.fields.related import ReverseManyToOneDescriptor, ForwardManyToOneDescriptor
+from django.db.models.fields.related import (
+ ReverseManyToOneDescriptor,
+ ForwardManyToOneDescriptor,
+)
from django.core.exceptions import FieldError, ValidationError
from rest_framework import serializers, validators
from rest_framework.exceptions import ValidationError as RestValidationError
+
# from drf_toolbox import serializers
from django_handleref.rest.serializers import HandleRefSerializer
from django.conf import settings
@@ -21,14 +25,30 @@
from django_namespace_perms.util import has_perms
from peeringdb_server.inet import RdapLookup, RdapNotFoundError, get_prefix_protocol
-from peeringdb_server.deskpro import ticket_queue_asnauto_skipvq, ticket_queue_rdap_error
+from peeringdb_server.deskpro import (
+ ticket_queue_asnauto_skipvq,
+ ticket_queue_rdap_error,
+)
from peeringdb_server.models import (
- QUEUE_ENABLED, VerificationQueueItem, InternetExchange,
- InternetExchangeFacility, IXLan, IXLanPrefix, Facility, Network,
- NetworkContact, NetworkFacility, NetworkIXLan, Organization)
+ QUEUE_ENABLED,
+ VerificationQueueItem,
+ InternetExchange,
+ InternetExchangeFacility,
+ IXLan,
+ IXLanPrefix,
+ Facility,
+ Network,
+ NetworkContact,
+ NetworkFacility,
+ NetworkIXLan,
+ Organization,
+)
from peeringdb_server.validators import (
- validate_address_space, validate_info_prefixes4, validate_info_prefixes6,
- validate_prefix_overlap)
+ validate_address_space,
+ validate_info_prefixes4,
+ validate_info_prefixes6,
+ validate_prefix_overlap,
+)
from django.utils.translation import ugettext_lazy as _
@@ -103,8 +123,7 @@ def get_relation_filters(flds, serializer, **kwargs):
if len(rx) in [2, 3] and rx[0] in flds:
rx[0] = queryable_field_xl(rx[0])
rx[1] = queryable_field_xl(rx[1])
- m = re.match("^(.+)__(lt|lte|gt|gte|contains|startswith|in)$",
- k)
+ m = re.match("^(.+)__(lt|lte|gt|gte|contains|startswith|in)$", k)
f = None
if m:
f = m.group(2)
@@ -124,6 +143,7 @@ class UniqueFieldValidator(object):
This should ideally be done in mysql, however we need to clear out the other
duplicates first, so we validate on the django side for now
"""
+
message = _("Need to be unique")
def __init__(self, fields, message=None, check_deleted=False):
@@ -132,7 +152,7 @@ def __init__(self, fields, message=None, check_deleted=False):
self.check_deleted = check_deleted
def set_context(self, serializer):
- self.instance = getattr(serializer, 'instance', None)
+ self.instance = getattr(serializer, "instance", None)
self.model = serializer.Meta.model
def __call__(self, attrs):
@@ -163,12 +183,12 @@ def __init__(self, field, methods=["POST", "PUT"], message=None):
def __call__(self, attrs):
if self.request.method in self.methods and not attrs.get(self.field):
- raise RestValidationError({
- self.field: self.message.format(methods=self.methods)
- })
+ raise RestValidationError(
+ {self.field: self.message.format(methods=self.methods)}
+ )
def set_context(self, serializer):
- self.instance = getattr(serializer, 'instance', None)
+ self.instance = getattr(serializer, "instance", None)
self.request = serializer._context.get("request")
@@ -185,14 +205,15 @@ def __init__(self, fields, message=None):
self.message = message or self.message
def set_context(self, serializer):
- self.instance = getattr(serializer, 'instance', None)
+ self.instance = getattr(serializer, "instance", None)
def __call__(self, attrs):
missing = {
field_name: self.message
- for field_name in self.fields if not attrs.get(field_name)
+ for field_name in self.fields
+ if not attrs.get(field_name)
}
- valid = (len(self.fields) != len(missing.keys()))
+ valid = len(self.fields) != len(missing.keys())
if not valid:
raise RestValidationError(missing)
@@ -223,12 +244,10 @@ def __call__(self, attrs):
self.request.rdap_result = rdap
except RdapException as exc:
self.request.rdap_error = (self.request.user, asn, exc)
- raise RestValidationError({
- self.field: "{}: {}".format(self.message, exc)
- })
+ raise RestValidationError({self.field: "{}: {}".format(self.message, exc)})
def set_context(self, serializer):
- self.instance = getattr(serializer, 'instance', None)
+ self.instance = getattr(serializer, "instance", None)
self.request = serializer._context.get("request")
@@ -248,12 +267,12 @@ def __call__(self, attrs):
if self.field not in attrs:
return
if self.request.method not in self.methods:
- raise RestValidationError({
- self.field: self.message.format(methods=self.methods)
- })
+ raise RestValidationError(
+ {self.field: self.message.format(methods=self.methods)}
+ )
def set_context(self, serializer):
- self.instance = getattr(serializer, 'instance', None)
+ self.instance = getattr(serializer, "instance", None)
self.request = serializer._context.get("request")
@@ -286,28 +305,24 @@ class ParentStatusException(IOError):
def __init__(self, parent, typ):
if parent.status == "pending":
super(IOError, self).__init__(
- _("Object of type '%(type)s' cannot be created because it's parent entity '%(parent_tag)s/%(parent_id)s' has not yet been approved"
- ) % {
- 'type': typ,
- 'parent_tag': parent.ref_tag,
- 'parent_id': parent.id
- })
+ _(
+ "Object of type '%(type)s' cannot be created because it's parent entity '%(parent_tag)s/%(parent_id)s' has not yet been approved"
+ )
+ % {"type": typ, "parent_tag": parent.ref_tag, "parent_id": parent.id}
+ )
elif parent.status == "deleted":
super(IOError, self).__init__(
- _("Object of type '%(type)s' cannot be created because it's parent entity '%(parent_tag)s/%(parent_id)s' has been marked as deleted"
- ) % {
- 'type': typ,
- 'parent_tag': parent.ref_tag,
- 'parent_id': parent.id
- })
+ _(
+ "Object of type '%(type)s' cannot be created because it's parent entity '%(parent_tag)s/%(parent_id)s' has been marked as deleted"
+ )
+ % {"type": typ, "parent_tag": parent.ref_tag, "parent_id": parent.id}
+ )
class AddressSerializer(serializers.ModelSerializer):
class Meta(object):
- model = AddressModel,
- fields = [
- 'address1', 'address2', 'city', 'country', 'state', 'zipcode'
- ]
+ model = (AddressModel,)
+ fields = ["address1", "address2", "city", "country", "state", "zipcode"]
class ModelSerializer(PermissionedModelSerializer):
@@ -379,10 +394,10 @@ def __init__(self, *args, **kwargs):
if not request:
return
- fields = self.context['request'].query_params.get('fields')
+ fields = self.context["request"].query_params.get("fields")
if fields:
- fields = fields.split(',')
+ fields = fields.split(",")
# Drop any fields that are not specified in the `fields` argument.
allowed = set(fields)
existing = set(self.fields.keys())
@@ -408,12 +423,16 @@ def queryable_relations(self):
"""
rv = []
for fld in self.Meta.model._meta.get_fields():
- if hasattr(fld, "get_internal_type") and fld.get_internal_type(
- ) == "ForeignKey":
+ if (
+ hasattr(fld, "get_internal_type")
+ and fld.get_internal_type() == "ForeignKey"
+ ):
model = fld.related_model
for _fld in model._meta.get_fields():
- if hasattr(_fld, "get_internal_type"
- ) and _fld.get_internal_type() != "ForeignKey":
+ if (
+ hasattr(_fld, "get_internal_type")
+ and _fld.get_internal_type() != "ForeignKey"
+ ):
rv.append("%s__%s" % (fld.name, _fld.name))
return rv
@@ -436,10 +455,9 @@ def depth_from_request(cls, request, is_list):
if not request:
raise ValueError("No Request")
return min(
- int(
- request.query_params.get("depth",
- cls.default_depth(is_list))),
- cls.max_depth(is_list))
+ int(request.query_params.get("depth", cls.default_depth(is_list))),
+ cls.max_depth(is_list),
+ )
except ValueError:
return cls.default_depth(is_list)
@@ -462,8 +480,17 @@ def default_depth(cls, is_list):
return 2
@classmethod
- def prefetch_related(cls, qset, request, prefetch=None, related=None,
- nested="", depth=None, is_list=False, single=None):
+ def prefetch_related(
+ cls,
+ qset,
+ request,
+ prefetch=None,
+ related=None,
+ nested="",
+ depth=None,
+ is_list=False,
+ single=None,
+ ):
"""
Prefetch related sets according to depth specified in the request
@@ -546,20 +573,31 @@ def prefetch_related(cls, qset, request, prefetch=None, related=None,
# build the Prefetch object
prefetch.append(
- Prefetch(src_fld, queryset=cls.prefetch_query(
- getattr(cls.Meta.model,
- fld).rel.related_model.objects.filter(
- status="ok"), request),
- to_attr=attr_fld))
+ Prefetch(
+ src_fld,
+ queryset=cls.prefetch_query(
+ getattr(
+ cls.Meta.model, fld
+ ).rel.related_model.objects.filter(status="ok"),
+ request,
+ ),
+ to_attr=attr_fld,
+ )
+ )
# expanded objects within sets may contain sets themselves,
# so make sure to prefetch those as well
cls._declared_fields.get(o_fld).child.prefetch_related(
- qset, request, related=related, prefetch=prefetch,
- nested=route_fld, depth=depth - 1, is_list=is_list)
+ qset,
+ request,
+ related=related,
+ prefetch=prefetch,
+ nested=route_fld,
+ depth=depth - 1,
+ is_list=is_list,
+ )
- elif type(model_field
- ) == ForwardManyToOneDescriptor and not is_list:
+ elif type(model_field) == ForwardManyToOneDescriptor and not is_list:
# single relations
@@ -580,9 +618,15 @@ def prefetch_related(cls, qset, request, prefetch=None, related=None,
# make sure to prefetch those as well
REFTAG_MAP.get(o_fld).prefetch_related(
- qset, request, single=fld, related=related,
- prefetch=prefetch, nested=route_fld, depth=depth - 1,
- is_list=is_list)
+ qset,
+ request,
+ single=fld,
+ related=related,
+ prefetch=prefetch,
+ nested=route_fld,
+ depth=depth - 1,
+ is_list=is_list,
+ )
if not nested:
# print "prefetching", [p.prefetch_through for p in prefetch]
@@ -594,8 +638,7 @@ def prefetch_related(cls, qset, request, prefetch=None, related=None,
def is_root(self):
if not self.parent:
return True
- if type(self.parent
- ) == serializers.ListSerializer and not self.parent.parent:
+ if type(self.parent) == serializers.ListSerializer and not self.parent.parent:
return True
return False
@@ -726,9 +769,7 @@ def run_validation(self, data=serializers.empty):
m = re.match("The fields (.+) must make a unique set.", v)
if m:
for fld in [i.strip() for i in m.group(1).split(",")]:
- filters[fld] = data.get(fld,
- self._unique_filter(
- fld, data))
+ filters[fld] = data.get(fld, self._unique_filter(fld, data))
elif v.find("must be unique") > -1:
filters[k] = data.get(k, self._unique_filter(k, data))
@@ -740,8 +781,10 @@ def run_validation(self, data=serializers.empty):
raise exc
except FieldError:
raise exc
- if has_perms(request.user, self.instance,
- "update") and self.instance.status == "deleted":
+ if (
+ has_perms(request.user, self.instance, "update")
+ and self.instance.status == "deleted"
+ ):
rv = super(ModelSerializer, self).run_validation(data=data)
self._undelete = True
return rv
@@ -764,8 +807,9 @@ def save(self, **kwargs):
if instance.status == "pending":
if self._context["request"]:
vq = VerificationQueueItem.objects.filter(
- content_type=ContentType.objects.get_for_model(
- type(instance)), object_id=instance.id).first()
+ content_type=ContentType.objects.get_for_model(type(instance)),
+ object_id=instance.id,
+ ).first()
if vq:
vq.user = self._context["request"].user
vq.save()
@@ -807,10 +851,7 @@ def request(self):
return None
def to_representation(self, data):
- return [
- self.child.to_representation(self.child.extract(item))
- for item in data
- ]
+ return [self.child.to_representation(self.child.extract(item)) for item in data]
def nested(serializer, exclude=[], getter=None, through=None, **kwargs):
@@ -858,7 +899,8 @@ class FacilitySerializer(ModelSerializer):
"""
org_id = serializers.PrimaryKeyRelatedField(
- queryset=Organization.objects.all(), source="org")
+ queryset=Organization.objects.all(), source="org"
+ )
org_name = serializers.CharField(source="org.name", read_only=True)
org = serializers.SerializerMethodField()
@@ -876,22 +918,35 @@ def has_create_perms(self, user, data):
# we dont want users to be able to create facilities if the parent
# organization status is pending or deleted
if data.get("org") and data.get("org").status != "ok":
- raise ParentStatusException(
- data.get("org"), self.Meta.model.handleref.tag)
+ raise ParentStatusException(data.get("org"), self.Meta.model.handleref.tag)
return super(FacilitySerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
- return self.Meta.model.nsp_namespace_from_id(
- data.get("org").id, "create")
+ return self.Meta.model.nsp_namespace_from_id(data.get("org").id, "create")
class Meta:
model = Facility
- fields = [
- "id", "org_id", "org_name", "org", "name", "website", "clli",
- "rencode", "npanxx", "notes", "net_count", "latitude", "longitude",
- "suggest"
- ] + HandleRefSerializer.Meta.fields + AddressSerializer.Meta.fields
+ fields = (
+ [
+ "id",
+ "org_id",
+ "org_name",
+ "org",
+ "name",
+ "website",
+ "clli",
+ "rencode",
+ "npanxx",
+ "notes",
+ "net_count",
+ "latitude",
+ "longitude",
+ "suggest",
+ ]
+ + HandleRefSerializer.Meta.fields
+ + AddressSerializer.Meta.fields
+ )
related_fields = ["org"]
@@ -904,8 +959,8 @@ def prepare_query(cls, qset, **kwargs):
qset = qset.select_related("org")
filters = get_relation_filters(
- ["net_id", "net", "ix_id", "ix", "org_name", "net_count"], cls,
- **kwargs)
+ ["net_id", "net", "ix_id", "ix", "org_name", "net_count"], cls, **kwargs
+ )
for field, e in filters.items():
for valid in ["net", "ix"]:
@@ -914,9 +969,7 @@ def prepare_query(cls, qset, **kwargs):
qset = fn(qset=qset, field=field, **e)
break
if field == "org_name":
- flt = {
- "org__name__%s" % (e["filt"] or "icontains"): e["value"]
- }
+ flt = {"org__name__%s" % (e["filt"] or "icontains"): e["value"]}
qset = qset.filter(**flt)
elif field == "network_count":
if e["filt"]:
@@ -927,8 +980,12 @@ def prepare_query(cls, qset, **kwargs):
qset = qset.annotate(
net_count_a=Sum(
Case(
- When(netfac_set__status="ok", then=1), default=0,
- output_field=IntegerField()))).filter(**flt)
+ When(netfac_set__status="ok", then=1),
+ default=0,
+ output_field=IntegerField(),
+ )
+ )
+ ).filter(**flt)
if "asn_overlap" in kwargs:
asns = kwargs.get("asn_overlap", [""])[0].split(",")
@@ -963,9 +1020,11 @@ class InternetExchangeFacilitySerializer(ModelSerializer):
"""
ix_id = serializers.PrimaryKeyRelatedField(
- queryset=InternetExchange.objects.all(), source="ix")
+ queryset=InternetExchange.objects.all(), source="ix"
+ )
fac_id = serializers.PrimaryKeyRelatedField(
- queryset=Facility.objects.all(), source="facility")
+ queryset=Facility.objects.all(), source="facility"
+ )
ix = serializers.SerializerMethodField()
fac = serializers.SerializerMethodField()
@@ -974,22 +1033,27 @@ def has_create_perms(self, user, data):
# we dont want users to be able to create ixfacs if the parent
# ix or fac status is pending or deleted
if data.get("ix") and data.get("ix").status != "ok":
- raise ParentStatusException(
- data.get("ix"), self.Meta.model.handleref.tag)
+ raise ParentStatusException(data.get("ix"), self.Meta.model.handleref.tag)
if data.get("fac") and data.get("fac").status != "ok":
- raise ParentStatusException(
- data.get("fac"), self.Meta.model.handleref.tag)
- return super(InternetExchangeFacilitySerializer,
- self).has_create_perms(user, data)
+ raise ParentStatusException(data.get("fac"), self.Meta.model.handleref.tag)
+ return super(InternetExchangeFacilitySerializer, self).has_create_perms(
+ user, data
+ )
def nsp_namespace_create(self, data):
- return self.Meta.model.nsp_namespace_from_id(data["ix"].org_id,
- data["ix"].id, "create")
+ return self.Meta.model.nsp_namespace_from_id(
+ data["ix"].org_id, data["ix"].id, "create"
+ )
class Meta:
model = InternetExchangeFacility
- fields = ['id', 'ix_id', "ix", "fac_id", "fac"
- ] + HandleRefSerializer.Meta.fields
+ fields = [
+ "id",
+ "ix_id",
+ "ix",
+ "fac_id",
+ "fac",
+ ] + HandleRefSerializer.Meta.fields
list_exclude = ["ix", "fac"]
@@ -1016,8 +1080,9 @@ class NetworkContactSerializer(ModelSerializer):
- net_id, handled by serializer
"""
- net_id = serializers.PrimaryKeyRelatedField(queryset=Network.objects.all(),
- source="network")
+ net_id = serializers.PrimaryKeyRelatedField(
+ queryset=Network.objects.all(), source="network"
+ )
net = serializers.SerializerMethodField()
def has_create_perms(self, user, data):
@@ -1025,13 +1090,14 @@ def has_create_perms(self, user, data):
# network status is pending or deleted
if data.get("network") and data.get("network").status != "ok":
raise ParentStatusException(
- data.get("network"), self.Meta.model.handleref.tag)
- return super(NetworkContactSerializer, self).has_create_perms(
- user, data)
+ data.get("network"), self.Meta.model.handleref.tag
+ )
+ return super(NetworkContactSerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
return self.Meta.model.nsp_namespace_from_id(
- data["network"].org.id, data["network"].id, "create")
+ data["network"].org.id, data["network"].id, "create"
+ )
class Meta:
model = NetworkContact
@@ -1073,10 +1139,12 @@ class NetworkIXLanSerializer(ModelSerializer):
- ix_id, handled by prepare_query
"""
- net_id = serializers.PrimaryKeyRelatedField(queryset=Network.objects.all(),
- source="network")
- ixlan_id = serializers.PrimaryKeyRelatedField(queryset=IXLan.objects.all(),
- source="ixlan")
+ net_id = serializers.PrimaryKeyRelatedField(
+ queryset=Network.objects.all(), source="network"
+ )
+ ixlan_id = serializers.PrimaryKeyRelatedField(
+ queryset=IXLan.objects.all(), source="ixlan"
+ )
net = serializers.SerializerMethodField()
ixlan = serializers.SerializerMethodField()
@@ -1092,31 +1160,46 @@ def has_create_perms(self, user, data):
# network or ixlan is pending or deleted
if data.get("network") and data.get("network").status != "ok":
raise ParentStatusException(
- data.get("network"), self.Meta.model.handleref.tag)
+ data.get("network"), self.Meta.model.handleref.tag
+ )
if data.get("ixlan") and data.get("ixlan").status != "ok":
raise ParentStatusException(
- data.get("ixlan"), self.Meta.model.handleref.tag)
+ data.get("ixlan"), self.Meta.model.handleref.tag
+ )
return super(NetworkIXLanSerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
return self.Meta.model.nsp_namespace_from_id(
- data["network"].org.id, data["network"].id, "create")
+ data["network"].org.id, data["network"].id, "create"
+ )
class Meta:
validators = [
SoftRequiredValidator(
- fields=('ipaddr4', 'ipaddr6'),
- message='Input required for IPv4 or IPv6'),
+ fields=("ipaddr4", "ipaddr6"), message="Input required for IPv4 or IPv6"
+ ),
UniqueFieldValidator(
- fields=('ipaddr4', 'ipaddr6'), message='IP already exists')
+ fields=("ipaddr4", "ipaddr6"), message="IP already exists"
+ ),
]
model = NetworkIXLan
depth = 0
fields = [
- 'id', "net_id", "net", "ix_id", "name", "ixlan_id", "ixlan",
- "notes", "speed", "asn", "ipaddr4", "ipaddr6", "is_rs_peer"
+ "id",
+ "net_id",
+ "net",
+ "ix_id",
+ "name",
+ "ixlan_id",
+ "ixlan",
+ "notes",
+ "speed",
+ "asn",
+ "ipaddr4",
+ "ipaddr6",
+ "is_rs_peer",
] + HandleRefSerializer.Meta.fields
related_fields = ["net", "ixlan"]
@@ -1168,12 +1251,12 @@ def validate(self, data):
try:
netixlan.validate_ipaddr4()
except ValidationError as exc:
- raise serializers.ValidationError({"ipaddr4":exc.message})
+ raise serializers.ValidationError({"ipaddr4": exc.message})
try:
netixlan.validate_ipaddr6()
except ValidationError as exc:
- raise serializers.ValidationError({"ipaddr6":exc.message})
+ raise serializers.ValidationError({"ipaddr6": exc.message})
return data
@@ -1189,9 +1272,11 @@ class NetworkFacilitySerializer(ModelSerializer):
# facilities = serializers.PrimaryKeyRelatedField(queryset='fac_set', many=True)
fac_id = serializers.PrimaryKeyRelatedField(
- queryset=Facility.objects.all(), source="facility")
- net_id = serializers.PrimaryKeyRelatedField(queryset=Network.objects.all(),
- source="network")
+ queryset=Facility.objects.all(), source="facility"
+ )
+ net_id = serializers.PrimaryKeyRelatedField(
+ queryset=Network.objects.all(), source="network"
+ )
fac = serializers.SerializerMethodField()
net = serializers.SerializerMethodField()
@@ -1204,7 +1289,7 @@ class Meta:
model = NetworkFacility
depth = 0
fields = [
- 'id',
+ "id",
"name",
"city",
"country",
@@ -1222,8 +1307,7 @@ class Meta:
@classmethod
def prepare_query(cls, qset, **kwargs):
- filters = get_relation_filters(["name", "country", "city"], cls,
- **kwargs)
+ filters = get_relation_filters(["name", "country", "city"], cls, **kwargs)
for field, e in filters.items():
for valid in ["name", "country", "city"]:
if validate_relation_filter_field(field, valid):
@@ -1239,16 +1323,18 @@ def has_create_perms(self, user, data):
# network or facility status is pending or deleted
if data.get("network") and data.get("network").status != "ok":
raise ParentStatusException(
- data.get("network"), self.Meta.model.handleref.tag)
+ data.get("network"), self.Meta.model.handleref.tag
+ )
if data.get("facility") and data.get("facility").status != "ok":
raise ParentStatusException(
- data.get("facility"), self.Meta.model.handleref.tag)
- return super(NetworkFacilitySerializer, self).has_create_perms(
- user, data)
+ data.get("facility"), self.Meta.model.handleref.tag
+ )
+ return super(NetworkFacilitySerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
return self.Meta.model.nsp_namespace_from_id(
- data["network"].org.id, data["network"].id, "create")
+ data["network"].org.id, data["network"].id, "create"
+ )
def get_net(self, inst):
return self.sub_serializer(NetworkSerializer, inst.network)
@@ -1282,45 +1368,77 @@ class NetworkSerializer(ModelSerializer):
- netfac_id, handled by prepare_query
- fac_id, handled by prepare_query
"""
- netfac_set = nested(NetworkFacilitySerializer, exclude=["net_id", "net"],
- source="netfac_set_active_prefetched")
-
- poc_set = nested(NetworkContactSerializer, exclude=["net_id", "net"],
- source="poc_set_active_prefetched")
-
- netixlan_set = nested(NetworkIXLanSerializer, exclude=["net_id", "net"],
- source="netixlan_set_active_prefetched")
+ netfac_set = nested(
+ NetworkFacilitySerializer,
+ exclude=["net_id", "net"],
+ source="netfac_set_active_prefetched",
+ )
+
+ poc_set = nested(
+ NetworkContactSerializer,
+ exclude=["net_id", "net"],
+ source="poc_set_active_prefetched",
+ )
+
+ netixlan_set = nested(
+ NetworkIXLanSerializer,
+ exclude=["net_id", "net"],
+ source="netixlan_set_active_prefetched",
+ )
org_id = serializers.PrimaryKeyRelatedField(
- queryset=Organization.objects.all(), source="org")
+ queryset=Organization.objects.all(), source="org"
+ )
org = serializers.SerializerMethodField()
route_server = ExtendedURLField(required=False, allow_blank=True)
- info_prefixes4 = SaneIntegerField(allow_null=False, required=False,
- validators=[validate_info_prefixes4])
- info_prefixes6 = SaneIntegerField(allow_null=False, required=False,
- validators=[validate_info_prefixes6])
+ info_prefixes4 = SaneIntegerField(
+ allow_null=False, required=False, validators=[validate_info_prefixes4]
+ )
+ info_prefixes6 = SaneIntegerField(
+ allow_null=False, required=False, validators=[validate_info_prefixes6]
+ )
suggest = serializers.BooleanField(required=False, write_only=True)
- validators = [
- AsnRdapValidator(),
- FieldMethodValidator("suggest", ["POST"])
- ]
+ validators = [AsnRdapValidator(), FieldMethodValidator("suggest", ["POST"])]
class Meta:
model = Network
depth = 1
fields = [
- 'id', "org_id", "org", "name", "aka", "website", "asn",
- "looking_glass", "route_server", "irr_as_set", "info_type",
- "info_prefixes4", "info_prefixes6", "info_traffic", "info_ratio",
- "info_scope", "info_unicast", "info_multicast", "info_ipv6",
- "notes", "policy_url", "policy_general", "policy_locations",
- "policy_ratio", "policy_contracts", "netfac_set", "netixlan_set",
- "poc_set", "allow_ixp_update", "suggest"
+ "id",
+ "org_id",
+ "org",
+ "name",
+ "aka",
+ "website",
+ "asn",
+ "looking_glass",
+ "route_server",
+ "irr_as_set",
+ "info_type",
+ "info_prefixes4",
+ "info_prefixes6",
+ "info_traffic",
+ "info_ratio",
+ "info_scope",
+ "info_unicast",
+ "info_multicast",
+ "info_ipv6",
+ "notes",
+ "policy_url",
+ "policy_general",
+ "policy_locations",
+ "policy_ratio",
+ "policy_contracts",
+ "netfac_set",
+ "netixlan_set",
+ "poc_set",
+ "allow_ixp_update",
+ "suggest",
] + HandleRefSerializer.Meta.fields
- default_fields = ['id', "name", "asn"]
+ default_fields = ["id", "name", "asn"]
related_fields = [
"org",
"netfac_set",
@@ -1340,10 +1458,22 @@ def prepare_query(cls, qset, **kwargs):
Currently supports: ixlan_id, ix_id, netixlan_id, netfac_id, fac_id
"""
- filters = get_relation_filters([
- "ixlan_id", "ixlan", "ix_id", "ix", "netixlan_id", "netixlan",
- "netfac_id", "netfac", "fac", "fac_id"
- ], cls, **kwargs)
+ filters = get_relation_filters(
+ [
+ "ixlan_id",
+ "ixlan",
+ "ix_id",
+ "ix",
+ "netixlan_id",
+ "netixlan",
+ "netfac_id",
+ "netfac",
+ "fac",
+ "fac_id",
+ ],
+ cls,
+ **kwargs
+ )
for field, e in filters.items():
for valid in ["ix", "ixlan", "netixlan", "netfac", "fac"]:
@@ -1354,8 +1484,7 @@ def prepare_query(cls, qset, **kwargs):
if "name_search" in kwargs:
name = kwargs.get("name_search", [""])[0]
- qset = qset.filter(
- Q(name__icontains=name) | Q(aka__icontains=name))
+ qset = qset.filter(Q(name__icontains=name) | Q(aka__icontains=name))
filters.update({"name_search": kwargs.get("name_search")})
# networks that are NOT present at exchange
@@ -1390,8 +1519,10 @@ def to_internal_value(self, data):
# with a specific error message indicating it (#288)
if Network.objects.filter(asn=data.get("asn"), status="deleted").exists():
- errmsg = _("Network has been deleted. Please contact {}").format(settings.DEFAULT_FROM_EMAIL)
- raise RestValidationError({"asn":errmsg})
+ errmsg = _("Network has been deleted. Please contact {}").format(
+ settings.DEFAULT_FROM_EMAIL
+ )
+ raise RestValidationError({"asn": errmsg})
return super(NetworkSerializer, self).to_internal_value(data)
@@ -1399,13 +1530,11 @@ def has_create_perms(self, user, data):
# we dont want users to be able to create networks if the parent
# organization status is pending or deleted
if data.get("org") and data.get("org").status != "ok":
- raise ParentStatusException(
- data.get("org"), self.Meta.model.handleref.tag)
+ raise ParentStatusException(data.get("org"), self.Meta.model.handleref.tag)
return super(NetworkSerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
- return self.Meta.model.nsp_namespace_from_id(
- data.get("org").id, "create")
+ return self.Meta.model.nsp_namespace_from_id(data.get("org").id, "create")
def get_org(self, inst):
return self.sub_serializer(OrganizationSerializer, inst.org)
@@ -1435,8 +1564,9 @@ def create(self, validated_data):
if rdap and user.validate_rdap_relationship(rdap):
# user email exists in RiR data, skip verification queue
validated_data["status"] = "ok"
- ticket_queue_asnauto_skipvq(user, validated_data["org"],
- validated_data, rdap)
+ ticket_queue_asnauto_skipvq(
+ user, validated_data["org"], validated_data, rdap
+ )
elif self.Meta.model in QUEUE_ENABLED:
# user email does NOT exist in RiR data, put into verification
@@ -1454,7 +1584,6 @@ def finalize_create(self, request):
ticket_queue_rdap_error(*rdap_error)
-
class IXLanPrefixSerializer(ModelSerializer):
"""
Serializer for peeringdb_server.models.IXLanPrefix
@@ -1464,26 +1593,33 @@ class IXLanPrefixSerializer(ModelSerializer):
- ix_id, handled by prepare_query
"""
- ixlan_id = serializers.PrimaryKeyRelatedField(queryset=IXLan.objects.all(),
- source="ixlan")
+ ixlan_id = serializers.PrimaryKeyRelatedField(
+ queryset=IXLan.objects.all(), source="ixlan"
+ )
ixlan = serializers.SerializerMethodField()
- prefix = IPPrefixField(validators=[
- validators.UniqueValidator(
- queryset=IXLanPrefix.objects.all()),
+ prefix = IPPrefixField(
+ validators=[
+ validators.UniqueValidator(queryset=IXLanPrefix.objects.all()),
validate_address_space,
validate_prefix_overlap,
- ])
+ ]
+ )
class Meta:
model = IXLanPrefix
- fields = ['id', 'ixlan', 'ixlan_id', 'protocol', 'prefix'
- ] + HandleRefSerializer.Meta.fields
+ fields = [
+ "id",
+ "ixlan",
+ "ixlan_id",
+ "protocol",
+ "prefix",
+ ] + HandleRefSerializer.Meta.fields
- related_fields = ['ixlan']
+ related_fields = ["ixlan"]
- list_exclude = ['ixlan']
+ list_exclude = ["ixlan"]
@classmethod
def prepare_query(cls, qset, **kwargs):
@@ -1502,13 +1638,14 @@ def has_create_perms(self, user, data):
# ixlan status is pending or deleted
if data.get("ixlan") and data.get("ixlan").status != "ok":
raise ParentStatusException(
- data.get("ixlan"), self.Meta.model.handleref.tag)
+ data.get("ixlan"), self.Meta.model.handleref.tag
+ )
return super(IXLanPrefixSerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
return self.Meta.model.nsp_namespace_from_id(
- data["ixlan"].ix.org.id, data["ixlan"].ix.id, data["ixlan"].id,
- "create")
+ data["ixlan"].ix.org.id, data["ixlan"].ix.id, data["ixlan"].id, "create"
+ )
def get_ixlan(self, inst):
return self.sub_serializer(IXLanSerializer, inst.ixlan)
@@ -1546,42 +1683,60 @@ class IXLanSerializer(ModelSerializer):
"""
ix_id = serializers.PrimaryKeyRelatedField(
- queryset=InternetExchange.objects.all(), source="ix")
+ queryset=InternetExchange.objects.all(), source="ix"
+ )
ix = serializers.SerializerMethodField()
- net_set = nested(NetworkSerializer,
- source="netixlan_set_active_prefetched",
- through="netixlan_set", getter="network")
- ixpfx_set = nested(IXLanPrefixSerializer, exclude=["ixlan_id", "ixlan"],
- source="ixpfx_set_active_prefetched")
+ net_set = nested(
+ NetworkSerializer,
+ source="netixlan_set_active_prefetched",
+ through="netixlan_set",
+ getter="network",
+ )
+ ixpfx_set = nested(
+ IXLanPrefixSerializer,
+ exclude=["ixlan_id", "ixlan"],
+ source="ixpfx_set_active_prefetched",
+ )
def has_create_perms(self, user, data):
# we dont want users to be able to create ixlans if the parent
# ix status is pending or deleted
if data.get("ix") and data.get("ix").status != "ok":
- raise ParentStatusException(
- data.get("ix"), self.Meta.model.handleref.tag)
+ raise ParentStatusException(data.get("ix"), self.Meta.model.handleref.tag)
return super(IXLanSerializer, self).has_create_perms(user, data)
def nsp_namespace_create(self, data):
- return self.Meta.model.nsp_namespace_from_id(data["ix"].org_id,
- data["ix"].id, "create")
+ return self.Meta.model.nsp_namespace_from_id(
+ data["ix"].org_id, data["ix"].id, "create"
+ )
class Meta:
model = IXLan
fields = [
- 'id', 'ix_id', "ix", "name", "descr", "mtu", "dot1q_support",
- "rs_asn", "arp_sponge", "net_set", "ixpfx_set",
- "ixf_ixp_member_list_url", "ixf_ixp_import_enabled",
+ "id",
+ "ix_id",
+ "ix",
+ "name",
+ "descr",
+ "mtu",
+ "dot1q_support",
+ "rs_asn",
+ "arp_sponge",
+ "net_set",
+ "ixpfx_set",
+ "ixf_ixp_member_list_url",
+ "ixf_ixp_import_enabled",
] + HandleRefSerializer.Meta.fields
related_fields = ["ix", "net_set", "ixpfx_set"]
list_exclude = ["ix"]
- extra_kwargs = {"ixf_ixp_member_list_url": {"write_only": True},
- "ixf_ixp_import_enabled": {"write_only": True},
- }
+ extra_kwargs = {
+ "ixf_ixp_member_list_url": {"write_only": True},
+ "ixf_ixp_import_enabled": {"write_only": True},
+ }
_ref_tag = model.handleref.tag
@@ -1606,14 +1761,20 @@ class InternetExchangeSerializer(ModelSerializer):
"""
org_id = serializers.PrimaryKeyRelatedField(
- queryset=Organization.objects.all(), source="org")
+ queryset=Organization.objects.all(), source="org"
+ )
org = serializers.SerializerMethodField()
- ixlan_set = nested(IXLanSerializer, exclude=["ix_id", "ix"],
- source="ixlan_set_active_prefetched")
- fac_set = nested(FacilitySerializer, source="ixfac_set_active_prefetched",
- through="ixfac_set", getter="facility")
+ ixlan_set = nested(
+ IXLanSerializer, exclude=["ix_id", "ix"], source="ixlan_set_active_prefetched"
+ )
+ fac_set = nested(
+ FacilitySerializer,
+ source="ixfac_set_active_prefetched",
+ through="ixfac_set",
+ getter="facility",
+ )
net_count = serializers.SerializerMethodField()
@@ -1623,28 +1784,54 @@ class InternetExchangeSerializer(ModelSerializer):
# creation. It will be a required field during `POST` requests
# but will be ignored during `PUT` so we cannot just do
# required=True here
- prefix = IPPrefixField(validators=[
- validators.UniqueValidator(
- queryset=IXLanPrefix.objects.filter(status__in=["ok", "pending"])),
- validate_address_space,
- validate_prefix_overlap,
- ], required=False, write_only=True)
+ prefix = IPPrefixField(
+ validators=[
+ validators.UniqueValidator(
+ queryset=IXLanPrefix.objects.filter(status__in=["ok", "pending"])
+ ),
+ validate_address_space,
+ validate_prefix_overlap,
+ ],
+ required=False,
+ write_only=True,
+ )
validators = [
FieldMethodValidator("suggest", ["POST"]),
RequiredForMethodValidator("prefix", ["POST"]),
- SoftRequiredValidator(["policy_email", "tech_email"],
- message=_("Specify at least one email address"))
+ SoftRequiredValidator(
+ ["policy_email", "tech_email"],
+ message=_("Specify at least one email address"),
+ ),
]
class Meta:
model = InternetExchange
fields = [
- 'id', "org_id", "org", "name", "name_long", "city", "country",
- "region_continent", "media", "notes", "proto_unicast",
- "proto_multicast", "proto_ipv6", "website", "url_stats",
- "tech_email", "tech_phone", "policy_email", "policy_phone",
- "fac_set", "ixlan_set", "suggest", "prefix", "net_count"
+ "id",
+ "org_id",
+ "org",
+ "name",
+ "name_long",
+ "city",
+ "country",
+ "region_continent",
+ "media",
+ "notes",
+ "proto_unicast",
+ "proto_multicast",
+ "proto_ipv6",
+ "website",
+ "url_stats",
+ "tech_email",
+ "tech_phone",
+ "policy_email",
+ "policy_phone",
+ "fac_set",
+ "ixlan_set",
+ "suggest",
+ "prefix",
+ "net_count",
] + HandleRefSerializer.Meta.fields
_ref_tag = model.handleref.tag
related_fields = ["org", "fac_set", "ixlan_set"]
@@ -1653,10 +1840,21 @@ class Meta:
@classmethod
def prepare_query(cls, qset, **kwargs):
- filters = get_relation_filters([
- "ixlan_id", "ixlan", "ixfac_id", "ixfac", "fac_id", "fac",
- "net_id", "net", "net_count"
- ], cls, **kwargs)
+ filters = get_relation_filters(
+ [
+ "ixlan_id",
+ "ixlan",
+ "ixfac_id",
+ "ixfac",
+ "fac_id",
+ "fac",
+ "net_id",
+ "net",
+ "net_count",
+ ],
+ cls,
+ **kwargs
+ )
for field, e in filters.items():
for valid in ["ixlan", "ixfac", "fac", "net"]:
@@ -1670,13 +1868,13 @@ def prepare_query(cls, qset, **kwargs):
if "ipblock" in kwargs:
qset = cls.Meta.model.related_to_ipblock(
- kwargs.get("ipblock", [""])[0], qset=qset)
+ kwargs.get("ipblock", [""])[0], qset=qset
+ )
filters.update({"ipblock": kwargs.get("ipblock")})
if "name_search" in kwargs:
name = kwargs.get("name_search", [""])[0]
- qset = qset.filter(
- Q(name__icontains=name) | Q(name_long__icontains=name))
+ qset = qset.filter(Q(name__icontains=name) | Q(name_long__icontains=name))
filters.update({"name_search": kwargs.get("name_search")})
if "asn_overlap" in kwargs:
@@ -1690,10 +1888,8 @@ def has_create_perms(self, user, data):
# we dont want users to be able to create internet exchanges if the parent
# organization status is pending or deleted
if data.get("org") and data.get("org").status != "ok":
- raise ParentStatusException(
- data.get("org"), self.Meta.model.handleref.tag)
- return super(InternetExchangeSerializer, self).has_create_perms(
- user, data)
+ raise ParentStatusException(data.get("org"), self.Meta.model.handleref.tag)
+ return super(InternetExchangeSerializer, self).has_create_perms(user, data)
def to_internal_value(self, data):
# if `suggest` keyword is provided, hard-set the org to
@@ -1720,8 +1916,7 @@ def create(self, validated_data):
ixlan = IXLan.objects.create(name="Main", ix=r, status="pending")
# see if prefix already exists in a deleted state
- ixpfx = IXLanPrefix.objects.filter(prefix=prefix,
- status="deleted").first()
+ ixpfx = IXLanPrefix.objects.filter(prefix=prefix, status="deleted").first()
if ixpfx:
# if it does, we want to re-assign it to this ix and
# undelete it
@@ -1731,14 +1926,16 @@ def create(self, validated_data):
else:
# if it does not exist we will create a new ixpfx object
ixpfx = IXLanPrefix.objects.create(
- ixlan=ixlan, prefix=prefix, status="pending",
- protocol=get_prefix_protocol(prefix))
+ ixlan=ixlan,
+ prefix=prefix,
+ status="pending",
+ protocol=get_prefix_protocol(prefix),
+ )
return r
def nsp_namespace_create(self, data):
- return self.Meta.model.nsp_namespace_from_id(
- data.get("org").id, "create")
+ return self.Meta.model.nsp_namespace_from_id(data.get("org").id, "create")
def get_org(self, inst):
return self.sub_serializer(OrganizationSerializer, inst.org)
@@ -1752,14 +1949,21 @@ class OrganizationSerializer(ModelSerializer):
Serializer for peeringdb_server.models.Organization
"""
- net_set = nested(NetworkSerializer, exclude=["org_id", "org"],
- source="net_set_active_prefetched")
+ net_set = nested(
+ NetworkSerializer, exclude=["org_id", "org"], source="net_set_active_prefetched"
+ )
- fac_set = nested(FacilitySerializer, exclude=["org_id", "org"],
- source="fac_set_active_prefetched")
+ fac_set = nested(
+ FacilitySerializer,
+ exclude=["org_id", "org"],
+ source="fac_set_active_prefetched",
+ )
- ix_set = nested(InternetExchangeSerializer, exclude=["org_id", "org"],
- source="ix_set_active_prefetched")
+ ix_set = nested(
+ InternetExchangeSerializer,
+ exclude=["org_id", "org"],
+ source="ix_set_active_prefetched",
+ )
def nsp_namespace_create(self, data):
return self.Meta.model.nsp_namespace_from_id("create")
@@ -1767,9 +1971,11 @@ def nsp_namespace_create(self, data):
class Meta: # (AddressSerializer.Meta):
model = Organization
depth = 1
- fields = [
- 'id', 'name', 'website', 'notes', 'net_set', 'fac_set', 'ix_set'
- ] + AddressSerializer.Meta.fields + HandleRefSerializer.Meta.fields
+ fields = (
+ ["id", "name", "website", "notes", "net_set", "fac_set", "ix_set"]
+ + AddressSerializer.Meta.fields
+ + HandleRefSerializer.Meta.fields
+ )
related_fields = [
"fac_set",
"net_set",
@@ -1780,10 +1986,19 @@ class Meta: # (AddressSerializer.Meta):
REFTAG_MAP = dict(
- [(cls.Meta.model.handleref.tag, cls)
- for cls in [
- OrganizationSerializer, NetworkSerializer, FacilitySerializer,
- InternetExchangeSerializer, InternetExchangeFacilitySerializer,
- NetworkFacilitySerializer, NetworkIXLanSerializer,
- NetworkContactSerializer, IXLanSerializer, IXLanPrefixSerializer
- ]])
+ [
+ (cls.Meta.model.handleref.tag, cls)
+ for cls in [
+ OrganizationSerializer,
+ NetworkSerializer,
+ FacilitySerializer,
+ InternetExchangeSerializer,
+ InternetExchangeFacilitySerializer,
+ NetworkFacilitySerializer,
+ NetworkIXLanSerializer,
+ NetworkContactSerializer,
+ IXLanSerializer,
+ IXLanPrefixSerializer,
+ ]
+ ]
+)
diff --git a/peeringdb_server/settings.py b/peeringdb_server/settings.py
index cd09ac13..9f789c7e 100644
--- a/peeringdb_server/settings.py
+++ b/peeringdb_server/settings.py
@@ -1,12 +1,15 @@
import os
from django.conf import settings
-PEERINGDB_VERSION = getattr(settings, 'PACKAGE_VERSION', '')
-RDAP_URL = getattr(settings, 'PEERINGDB_RDAP_URL', 'https://rdap.db.ripe.net/')
-RDAP_LACNIC_APIKEY = getattr(settings, 'PEERINGDB_RDAP_LACNIC_APIKEY', None)
-RDAP_RECURSE_ROLES = getattr(settings, 'PEERINGDB_RDAP_RECURSE_ROLES',
- ["administrative", "technical"])
-TUTORIAL_MODE = getattr(settings, 'TUTORIAL_MODE', False)
-AUTO_APPROVE_AFFILIATION = getattr(settings, 'AUTO_APPROVE_AFFILIATION', False)
-AUTO_VERIFY_USERS = getattr(settings, 'AUTO_VERIFY_USERS', False)
-MAINTENANCE_MODE_LOCKFILE = getattr(settings, 'MAINTENANCE_MODE_LOCKFILE', 'maintenance.lock')
+PEERINGDB_VERSION = getattr(settings, "PACKAGE_VERSION", "")
+RDAP_URL = getattr(settings, "PEERINGDB_RDAP_URL", "https://rdap.db.ripe.net/")
+RDAP_LACNIC_APIKEY = getattr(settings, "PEERINGDB_RDAP_LACNIC_APIKEY", None)
+RDAP_RECURSE_ROLES = getattr(
+ settings, "PEERINGDB_RDAP_RECURSE_ROLES", ["administrative", "technical"]
+)
+TUTORIAL_MODE = getattr(settings, "TUTORIAL_MODE", False)
+AUTO_APPROVE_AFFILIATION = getattr(settings, "AUTO_APPROVE_AFFILIATION", False)
+AUTO_VERIFY_USERS = getattr(settings, "AUTO_VERIFY_USERS", False)
+MAINTENANCE_MODE_LOCKFILE = getattr(
+ settings, "MAINTENANCE_MODE_LOCKFILE", "maintenance.lock"
+)
diff --git a/peeringdb_server/signals.py b/peeringdb_server/signals.py
index 696c2ab6..7e4d9d2d 100644
--- a/peeringdb_server/signals.py
+++ b/peeringdb_server/signals.py
@@ -14,12 +14,23 @@
from peeringdb_server.inet import RdapLookup, RdapNotFoundError, RdapException
-from peeringdb_server.deskpro import (ticket_queue, ticket_queue_asnauto_affil,
- ticket_queue_asnauto_create)
+from peeringdb_server.deskpro import (
+ ticket_queue,
+ ticket_queue_asnauto_affil,
+ ticket_queue_asnauto_create,
+)
from peeringdb_server.models import (
- QUEUE_ENABLED, QUEUE_NOTIFY, UserOrgAffiliationRequest, is_suggested,
- VerificationQueueItem, Organization, Facility, Network, NetworkContact)
+ QUEUE_ENABLED,
+ QUEUE_NOTIFY,
+ UserOrgAffiliationRequest,
+ is_suggested,
+ VerificationQueueItem,
+ Organization,
+ Facility,
+ Network,
+ NetworkContact,
+)
import peeringdb_server.settings as pdb_settings
@@ -42,7 +53,7 @@ def addressmodel_save(sender, instance=None, **kwargs):
a = getattr(instance, field.name)
b = getattr(old, field.name)
if a != b:
- #print("Change in field '%s' - '%s'(%s) to '%s'(%s) - marking %s for geocode sync" % (field.name, a, type(a), b, type(b), instance))
+ # print("Change in field '%s' - '%s'(%s) to '%s'(%s) - marking %s for geocode sync" % (field.name, a, type(a), b, type(b), instance))
# address model field has changed, mark for geocode sync
instance.geocode_status = False
@@ -65,13 +76,16 @@ def org_save(sender, **kwargs):
group = Group(name=inst.group_name)
group.save()
- perm = GroupPermission(group=group, namespace=inst.nsp_namespace,
- permissions=PERM_READ)
+ perm = GroupPermission(
+ group=group, namespace=inst.nsp_namespace, permissions=PERM_READ
+ )
perm.save()
GroupPermission(
- group=group, namespace=NetworkContact.nsp_namespace_from_id(
- inst.id, "*", "private"), permissions=PERM_READ).save()
+ group=group,
+ namespace=NetworkContact.nsp_namespace_from_id(inst.id, "*", "private"),
+ permissions=PERM_READ,
+ ).save()
# make the admin group for the org
try:
@@ -80,16 +94,20 @@ def org_save(sender, **kwargs):
group = Group(name=inst.admin_group_name)
group.save()
- perm = GroupPermission(group=group, namespace=inst.nsp_namespace,
- permissions=PERM_CRUD)
+ perm = GroupPermission(
+ group=group, namespace=inst.nsp_namespace, permissions=PERM_CRUD
+ )
perm.save()
- GroupPermission(group=group, namespace=inst.nsp_namespace_manage,
- permissions=PERM_CRUD).save()
+ GroupPermission(
+ group=group, namespace=inst.nsp_namespace_manage, permissions=PERM_CRUD
+ ).save()
GroupPermission(
- group=group, namespace=NetworkContact.nsp_namespace_from_id(
- inst.id, "*", "private"), permissions=PERM_CRUD).save()
+ group=group,
+ namespace=NetworkContact.nsp_namespace_from_id(inst.id, "*", "private"),
+ permissions=PERM_CRUD,
+ ).save()
if inst.status == "deleted":
for ar in inst.affiliation_requests.all():
@@ -162,12 +180,10 @@ def uoar_creation(sender, instance, created=False, **kwargs):
instance.status = "pending"
instance.save()
- if instance.org_id and instance.org.admin_usergroup.user_set.count(
- ) > 0:
+ if instance.org_id and instance.org.admin_usergroup.user_set.count() > 0:
# check that user is not already a member of that org
- if instance.user.groups.filter(
- name=instance.org.usergroup.name).exists():
+ if instance.user.groups.filter(name=instance.org.usergroup.name).exists():
instance.approve()
return
@@ -176,16 +192,21 @@ def uoar_creation(sender, instance, created=False, **kwargs):
for user in instance.org.admin_usergroup.user_set.all():
with override(user.locale):
user.email_user(
- _(u"User %(u_name)s wishes to be affiliated to your Organization"
- ) % {'u_name': instance.user.full_name},
+ _(
+ u"User %(u_name)s wishes to be affiliated to your Organization"
+ )
+ % {"u_name": instance.user.full_name},
loader.get_template(
- 'email/notify-org-admin-user-affil.txt').render({
+ "email/notify-org-admin-user-affil.txt"
+ ).render(
+ {
"user": instance.user,
"org": instance.org,
- "org_management_url": '%s/org/%d#users' %
- (settings.BASE_URL,
- instance.org.id)
- }))
+ "org_management_url": "%s/org/%d#users"
+ % (settings.BASE_URL, instance.org.id),
+ }
+ ),
+ )
else:
request_type = "be affiliated to"
rdap_data = {"emails": []}
@@ -204,12 +225,14 @@ def uoar_creation(sender, instance, created=False, **kwargs):
# create organization
instance.org, org_created = Organization.create_from_rdap(
- rdap, instance.asn, instance.org_name)
+ rdap, instance.asn, instance.org_name
+ )
instance.save()
# create network
net, net_created = Network.create_from_rdap(
- rdap, instance.asn, instance.org)
+ rdap, instance.asn, instance.org
+ )
# if affiliate auto appove is on, auto approve at this point
if pdb_settings.AUTO_APPROVE_AFFILIATION:
@@ -217,15 +240,20 @@ def uoar_creation(sender, instance, created=False, **kwargs):
return
ticket_queue_asnauto_create(
- instance.user, instance.org, net, rdap, net.asn,
- org_created=org_created, net_created=net_created)
+ instance.user,
+ instance.org,
+ net,
+ rdap,
+ net.asn,
+ org_created=org_created,
+ net_created=net_created,
+ )
# if user's relationship to network can be validated now
# we can approve the ownership request right away
if instance.user.validate_rdap_relationship(rdap):
instance.approve()
- ticket_queue_asnauto_affil(instance.user, instance.org,
- net, rdap)
+ ticket_queue_asnauto_affil(instance.user, instance.org, net, rdap)
return
if instance.org:
@@ -247,51 +275,67 @@ def uoar_creation(sender, instance, created=False, **kwargs):
rdap_data["emails"].extend(rdap.emails)
if instance.user.validate_rdap_relationship(rdap):
ticket_queue_asnauto_affil(
- instance.user, instance.org,
- Network.objects.get(asn=asn), rdap)
+ instance.user,
+ instance.org,
+ Network.objects.get(asn=asn),
+ rdap,
+ )
instance.approve()
return
else:
entity_name = instance.org_name
if pdb_settings.AUTO_APPROVE_AFFILIATION:
- org = Organization.objects.create(name=instance.org_name, status="ok")
+ org = Organization.objects.create(
+ name=instance.org_name, status="ok"
+ )
instance.org = org
instance.approve()
return
-
-
# organization has no owners and RDAP information could not verify the user's relationship to the organization, notify pdb staff for review
ticket_queue(
- u'User %s wishes to %s %s' % (instance.user.username,
- request_type, entity_name),
- loader.get_template('email/notify-pdb-admin-user-affil.txt')
- .render({
- "user": instance.user,
- "instance": instance,
- "base_url": settings.BASE_URL,
- "org_add_url": "%s%s" % (
- settings.BASE_URL,
- urlresolvers.reverse(
- "admin:peeringdb_server_organization_add")),
- "net_add_url": "%s%s" %
- (settings.BASE_URL,
- urlresolvers.reverse(
- "admin:peeringdb_server_network_add")),
- "review_url": "%s%s" %
- (settings.BASE_URL,
- urlresolvers.reverse(
- "admin:peeringdb_server_user_change",
- args=(instance.user.id, ))),
- "approve_url": "%s%s" % (
- settings.BASE_URL,
- urlresolvers.reverse(
- "admin:peeringdb_server_userorgaffiliationrequest_actions",
- args=(instance.id, "approve_and_notify"))),
- "emails": list(set(rdap_data["emails"])),
- "rdap_lookup": rdap_lookup
- }), instance.user)
+ u"User %s wishes to %s %s"
+ % (instance.user.username, request_type, entity_name),
+ loader.get_template("email/notify-pdb-admin-user-affil.txt").render(
+ {
+ "user": instance.user,
+ "instance": instance,
+ "base_url": settings.BASE_URL,
+ "org_add_url": "%s%s"
+ % (
+ settings.BASE_URL,
+ urlresolvers.reverse(
+ "admin:peeringdb_server_organization_add"
+ ),
+ ),
+ "net_add_url": "%s%s"
+ % (
+ settings.BASE_URL,
+ urlresolvers.reverse("admin:peeringdb_server_network_add"),
+ ),
+ "review_url": "%s%s"
+ % (
+ settings.BASE_URL,
+ urlresolvers.reverse(
+ "admin:peeringdb_server_user_change",
+ args=(instance.user.id,),
+ ),
+ ),
+ "approve_url": "%s%s"
+ % (
+ settings.BASE_URL,
+ urlresolvers.reverse(
+ "admin:peeringdb_server_userorgaffiliationrequest_actions",
+ args=(instance.id, "approve_and_notify"),
+ ),
+ ),
+ "emails": list(set(rdap_data["emails"])),
+ "rdap_lookup": rdap_lookup,
+ }
+ ),
+ instance.user,
+ )
elif instance.status == "approved" and instance.org_id:
@@ -312,7 +356,8 @@ def verification_queue_update(sender, instance, **kwargs):
try:
VerificationQueueItem.objects.get(
content_type=ContentType.objects.get_for_model(sender),
- object_id=instance.id)
+ object_id=instance.id,
+ )
except VerificationQueueItem.DoesNotExist:
q = VerificationQueueItem(item=instance)
q.save()
@@ -320,7 +365,8 @@ def verification_queue_update(sender, instance, **kwargs):
try:
q = VerificationQueueItem.objects.get(
content_type=ContentType.objects.get_for_model(sender),
- object_id=instance.id)
+ object_id=instance.id,
+ )
q.delete()
except VerificationQueueItem.DoesNotExist:
pass
@@ -329,7 +375,8 @@ def verification_queue_delete(sender, instance, **kwargs):
try:
q = VerificationQueueItem.objects.get(
content_type=ContentType.objects.get_for_model(sender),
- object_id=instance.id)
+ object_id=instance.id,
+ )
q.delete()
except VerificationQueueItem.DoesNotExist:
pass
@@ -347,7 +394,8 @@ def verification_queue_notify(sender, instance, **kwargs):
user = instance.user
if type(item) in QUEUE_NOTIFY and not getattr(
- settings, "DISABLE_VERIFICATION_QUEUE_EMAILS", False):
+ settings, "DISABLE_VERIFICATION_QUEUE_EMAILS", False
+ ):
if type(item) == Network:
rdap = RdapLookup().get_asn(item.asn)
@@ -361,15 +409,19 @@ def verification_queue_notify(sender, instance, **kwargs):
ticket_queue(
title,
- loader.get_template('email/notify-pdb-admin-vq.txt').render({
- "entity_type_name": str(instance.content_type),
- "suggested": is_suggested(item),
- "item": item,
- "user": user,
- "rdap": rdap,
- "edit_url": "%s%s" % (settings.BASE_URL,
- instance.item_admin_url)
- }), instance.user)
+ loader.get_template("email/notify-pdb-admin-vq.txt").render(
+ {
+ "entity_type_name": str(instance.content_type),
+ "suggested": is_suggested(item),
+ "item": item,
+ "user": user,
+ "rdap": rdap,
+ "edit_url": "%s%s"
+ % (settings.BASE_URL, instance.item_admin_url),
+ }
+ ),
+ instance.user,
+ )
instance.notified = True
instance.save()
@@ -382,9 +434,10 @@ def verification_queue_notify(sender, instance, **kwargs):
def cors_allow_api_get_to_everyone(sender, request, **kwargs):
- #FIXME: path name to look for should come from config
- return ((request.path == "/api" or request.path.startswith('/api/'))
- and request.method in ["GET", "OPTIONS"])
+ # FIXME: path name to look for should come from config
+ return (
+ request.path == "/api" or request.path.startswith("/api/")
+ ) and request.method in ["GET", "OPTIONS"]
check_request_enabled.connect(cors_allow_api_get_to_everyone)
diff --git a/peeringdb_server/stats.py b/peeringdb_server/stats.py
index 707a4eba..5e9c8bea 100644
--- a/peeringdb_server/stats.py
+++ b/peeringdb_server/stats.py
@@ -2,18 +2,24 @@
load and maintain global stats
"""
-from peeringdb_server.models import (Network, InternetExchange, Facility,
- NetworkIXLan, NetworkFacility)
+from peeringdb_server.models import (
+ Network,
+ InternetExchange,
+ Facility,
+ NetworkIXLan,
+ NetworkFacility,
+)
def stats():
return {
Network.handleref.tag: Network.handleref.filter(status="ok").count(),
- InternetExchange.handleref.tag:
- InternetExchange.handleref.filter(status="ok").count(),
+ InternetExchange.handleref.tag: InternetExchange.handleref.filter(
+ status="ok"
+ ).count(),
Facility.handleref.tag: Facility.handleref.filter(status="ok").count(),
- NetworkIXLan.handleref.tag: NetworkIXLan.handleref.filter(status="ok")
- .count(),
- NetworkFacility.handleref.tag:
- NetworkFacility.handleref.filter(status="ok").count()
+ NetworkIXLan.handleref.tag: NetworkIXLan.handleref.filter(status="ok").count(),
+ NetworkFacility.handleref.tag: NetworkFacility.handleref.filter(
+ status="ok"
+ ).count(),
}
diff --git a/peeringdb_server/templatetags/util.py b/peeringdb_server/templatetags/util.py
index 47aa129d..42e19617 100644
--- a/peeringdb_server/templatetags/util.py
+++ b/peeringdb_server/templatetags/util.py
@@ -2,8 +2,12 @@
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
import datetime
-from peeringdb_server.models import (InternetExchange, Network, Facility,
- PARTNERSHIP_LEVELS)
+from peeringdb_server.models import (
+ InternetExchange,
+ Network,
+ Facility,
+ PARTNERSHIP_LEVELS,
+)
from peeringdb_server.views import DoNotRender
from peeringdb_server.org_admin_views import permission_ids
@@ -22,10 +26,10 @@
@register.filter
def editable_list_value(row):
- if row.get('value') or row.get('value_label'):
- return _(row.get('value_label', row.get('value')))
- elif row.get('blank') and row.get('value') == "":
- return row.get('blank')
+ if row.get("value") or row.get("value_label"):
+ return _(row.get("value_label", row.get("value")))
+ elif row.get("blank") and row.get("value") == "":
+ return row.get("blank")
return ""
@@ -51,7 +55,7 @@ def org_permission_id_xl(org, id):
@register.filter
def check_perms(v, op):
flg = get_permission_flag(op)
- return (v & flg == flg)
+ return v & flg == flg
@register.filter
@@ -83,10 +87,14 @@ def ownership_warning(org, user):
pass
if not b:
- return mark_safe('{}'.format(
- _("Your email address does not match the domain information we have on file for this organization."
- )))
- return ''
+ return mark_safe(
+ '{}'.format(
+ _(
+ "Your email address does not match the domain information we have on file for this organization."
+ )
+ )
+ )
+ return ""
@register.filter
@@ -140,8 +148,7 @@ def dont_render(value):
@register.filter
def age(dt):
- seconds = (datetime.datetime.now().replace(tzinfo=dt.tzinfo) -
- dt).total_seconds()
+ seconds = (datetime.datetime.now().replace(tzinfo=dt.tzinfo) - dt).total_seconds()
if seconds < 60:
return "%d %s" % (seconds, _("seconds ago"))
elif seconds < 3600:
@@ -168,13 +175,13 @@ def ref_tag(value):
@register.filter
def pretty_speed(value):
if not value:
- return ''
+ return ""
try:
value = int(value)
if value >= 1000000:
- return "%dT" % (value / 10**6)
+ return "%dT" % (value / 10 ** 6)
elif value >= 1000:
- return "%dG" % (value / 10**3)
+ return "%dG" % (value / 10 ** 3)
else:
return "%dM" % value
except ValueError:
@@ -215,5 +222,5 @@ def render_markdown(value):
"a",
]
return bleach.clean(
- markdown.markdown(value), tags=markdown_tags,
- protocols=['http', 'https'])
+ markdown.markdown(value), tags=markdown_tags, protocols=["http", "https"]
+ )
diff --git a/peeringdb_server/urls.py b/peeringdb_server/urls.py
index 35501f8e..f5680b6d 100644
--- a/peeringdb_server/urls.py
+++ b/peeringdb_server/urls.py
@@ -4,14 +4,20 @@
import peeringdb_server.rest
-from peeringdb_server.models import (InternetExchange, Network, Facility,
- Organization)
+from peeringdb_server.models import InternetExchange, Network, Facility, Organization
from peeringdb_server.autocomplete_views import (
- FacilityAutocompleteForNetwork, FacilityAutocompleteForExchange,
- OrganizationAutocomplete, ExchangeAutocomplete, ExchangeAutocompleteJSON,
- IXLanAutocomplete, FacilityAutocomplete, FacilityAutocompleteJSON,
- DeletedVersionAutocomplete, clt_history)
+ FacilityAutocompleteForNetwork,
+ FacilityAutocompleteForExchange,
+ OrganizationAutocomplete,
+ ExchangeAutocomplete,
+ ExchangeAutocompleteJSON,
+ IXLanAutocomplete,
+ FacilityAutocomplete,
+ FacilityAutocompleteJSON,
+ DeletedVersionAutocomplete,
+ clt_history,
+)
from peeringdb_server.export_views import (
view_export_ixf_ix_members,
@@ -69,131 +75,155 @@
import peeringdb_server.data_views
urlpatterns = [
- url(r'^api_search$', request_api_search),
- url(r'^search$', request_search),
- url(r'^advanced_search', view_advanced_search),
- url(r'^auth$', request_login),
- url(r'^logout$', request_logout),
- url(r'^login$', view_login),
- url(r'^register$', view_registration),
- url(r'^reset-password$', view_password_reset),
- url(r'^change-password$', view_password_change),
- url(r'^set-user-locale$', view_set_user_locale),
- url(r'^username-retrieve/initiate$', view_username_retrieve_initiate),
- url(r'^username-retrieve/complete$', view_username_retrieve_complete),
- url(r'^username-retrieve$', view_username_retrieve),
- url(r'^verify$', view_verify),
- url(r'^profile$', view_profile),
- url(r'^profile/v1$', view_profile_v1),
- url(r'^resend_email_confirmation$', resend_confirmation_mail),
- url(r'^sponsors$', view_sponsorships),
- #url(r'^partners$', view_partnerships),
- url(r'^aup$', view_aup),
- url(r'^about$', view_about),
- url(r'^affiliate-to-org$', view_affiliate_to_org),
- url(r'^request-ownership$', view_request_ownership),
-    url(r'^%s/(?P<id>\d+)/?$' % Network.handleref.tag, view_network),
-    url(r'^%s/(?P<id>\d+)/?$' % InternetExchange.handleref.tag, view_exchange),
-    url(r'^%s/(?P<id>\d+)/?$' % Facility.handleref.tag, view_facility),
-    url(r'^%s/(?P<id>\d+)/?$' % Organization.handleref.tag, view_organization),
-    url(r'^%s$' % Network.handleref.tag, view_network_by_query),
-    url(r'^asn/(?P<asn>\d+)/?$', view_network_by_asn),
- url(r'^org_admin/users$', peeringdb_server.org_admin_views.users),
- url(r'^org_admin/user_permissions$',
- peeringdb_server.org_admin_views.user_permissions),
- url(r'^org_admin/user_permissions/update$',
- peeringdb_server.org_admin_views.user_permission_update),
- url(r'^org_admin/user_permissions/remove$',
- peeringdb_server.org_admin_views.user_permission_remove),
- url(r'^org_admin/permissions$',
- peeringdb_server.org_admin_views.permissions),
- url(r'^org_admin/uoar/approve$',
- peeringdb_server.org_admin_views.uoar_approve),
- url(r'^org_admin/uoar/deny$', peeringdb_server.org_admin_views.uoar_deny),
- url(r'^org_admin/manage_user/update$',
- peeringdb_server.org_admin_views.manage_user_update),
- url(r'^org_admin/manage_user/delete$',
- peeringdb_server.org_admin_views.manage_user_delete),
- url(r'^data/countries$', peeringdb_server.data_views.countries),
- url(r'^data/sponsors$', peeringdb_server.data_views.sponsorships),
- url(r'^data/countries_b$', peeringdb_server.data_views.countries_w_blank),
- url(r'^data/facilities$', peeringdb_server.data_views.facilities),
-    url(r'^data/enum/(?P<name>[\w_]+)$', peeringdb_server.data_views.enum),
- url(r'^data/asns$', peeringdb_server.data_views.asns),
- url(r'^data/organizations$', peeringdb_server.data_views.organizations),
- url(r'^data/locales$', peeringdb_server.data_views.languages),
-    url(r'^export/ix/(?P<ix_id>\d+)/ixp-member-list$',
-        view_export_ixf_ix_members),
-    url(r'^export/ixlan/(?P<ixlan_id>\d+)/ixp-member-list$',
-        view_export_ixf_ixlan_members),
-    url(r'^export/advanced-search/(?P<tag>[\w_]+)/(?P<fmt>[\w_-]+)$',
-        AdvancedSearchExportView.as_view()),
-    url(r'^import/ixlan/(?P<ixlan_id>\d+)/ixf/preview$',
-        view_import_ixlan_ixf_preview),
-    url(r'^import/net/(?P<net_id>\d+)/ixf/postmortem$',
-        view_import_net_ixf_postmortem),
-    url(r'^import/net/(?P<net_id>\d+)/ixf/preview$',
-        view_import_net_ixf_preview),
- url(r'^$', view_index),
- url(r'^i18n/', include('django.conf.urls.i18n')),
- url('jsi18n/', JavaScriptCatalog.as_view(), name='javascript-catalog'),
- url(r'^(net|ix|fac|org|asn)/translate$', request_translation),
-    url(r'^suggest/(?P<reftag>fac)$', view_suggest),
- url(r'^maintenance$', view_maintenance, name="maintenance")
+ url(r"^api_search$", request_api_search),
+ url(r"^search$", request_search),
+ url(r"^advanced_search", view_advanced_search),
+ url(r"^auth$", request_login),
+ url(r"^logout$", request_logout),
+ url(r"^login$", view_login),
+ url(r"^register$", view_registration),
+ url(r"^reset-password$", view_password_reset),
+ url(r"^change-password$", view_password_change),
+ url(r"^set-user-locale$", view_set_user_locale),
+ url(r"^username-retrieve/initiate$", view_username_retrieve_initiate),
+ url(r"^username-retrieve/complete$", view_username_retrieve_complete),
+ url(r"^username-retrieve$", view_username_retrieve),
+ url(r"^verify$", view_verify),
+ url(r"^profile$", view_profile),
+ url(r"^profile/v1$", view_profile_v1),
+ url(r"^resend_email_confirmation$", resend_confirmation_mail),
+ url(r"^sponsors$", view_sponsorships),
+ # url(r'^partners$', view_partnerships),
+ url(r"^aup$", view_aup),
+ url(r"^about$", view_about),
+ url(r"^affiliate-to-org$", view_affiliate_to_org),
+ url(r"^request-ownership$", view_request_ownership),
+    url(r"^%s/(?P<id>\d+)/?$" % Network.handleref.tag, view_network),
+    url(r"^%s/(?P<id>\d+)/?$" % InternetExchange.handleref.tag, view_exchange),
+    url(r"^%s/(?P<id>\d+)/?$" % Facility.handleref.tag, view_facility),
+    url(r"^%s/(?P<id>\d+)/?$" % Organization.handleref.tag, view_organization),
+    url(r"^%s$" % Network.handleref.tag, view_network_by_query),
+    url(r"^asn/(?P<asn>\d+)/?$", view_network_by_asn),
+ url(r"^org_admin/users$", peeringdb_server.org_admin_views.users),
+ url(
+ r"^org_admin/user_permissions$",
+ peeringdb_server.org_admin_views.user_permissions,
+ ),
+ url(
+ r"^org_admin/user_permissions/update$",
+ peeringdb_server.org_admin_views.user_permission_update,
+ ),
+ url(
+ r"^org_admin/user_permissions/remove$",
+ peeringdb_server.org_admin_views.user_permission_remove,
+ ),
+ url(r"^org_admin/permissions$", peeringdb_server.org_admin_views.permissions),
+ url(r"^org_admin/uoar/approve$", peeringdb_server.org_admin_views.uoar_approve),
+ url(r"^org_admin/uoar/deny$", peeringdb_server.org_admin_views.uoar_deny),
+ url(
+ r"^org_admin/manage_user/update$",
+ peeringdb_server.org_admin_views.manage_user_update,
+ ),
+ url(
+ r"^org_admin/manage_user/delete$",
+ peeringdb_server.org_admin_views.manage_user_delete,
+ ),
+ url(r"^data/countries$", peeringdb_server.data_views.countries),
+ url(r"^data/sponsors$", peeringdb_server.data_views.sponsorships),
+ url(r"^data/countries_b$", peeringdb_server.data_views.countries_w_blank),
+ url(r"^data/facilities$", peeringdb_server.data_views.facilities),
+    url(r"^data/enum/(?P<name>[\w_]+)$", peeringdb_server.data_views.enum),
+ url(r"^data/asns$", peeringdb_server.data_views.asns),
+ url(r"^data/organizations$", peeringdb_server.data_views.organizations),
+ url(r"^data/locales$", peeringdb_server.data_views.languages),
+    url(r"^export/ix/(?P<ix_id>\d+)/ixp-member-list$", view_export_ixf_ix_members),
+    url(
+        r"^export/ixlan/(?P<ixlan_id>\d+)/ixp-member-list$",
+        view_export_ixf_ixlan_members,
+    ),
+    url(
+        r"^export/advanced-search/(?P<tag>[\w_]+)/(?P<fmt>[\w_-]+)$",
+        AdvancedSearchExportView.as_view(),
+    ),
+    url(r"^import/ixlan/(?P<ixlan_id>\d+)/ixf/preview$", view_import_ixlan_ixf_preview),
+    url(r"^import/net/(?P<net_id>\d+)/ixf/postmortem$", view_import_net_ixf_postmortem),
+    url(r"^import/net/(?P<net_id>\d+)/ixf/preview$", view_import_net_ixf_preview),
+ url(r"^$", view_index),
+ url(r"^i18n/", include("django.conf.urls.i18n")),
+ url("jsi18n/", JavaScriptCatalog.as_view(), name="javascript-catalog"),
+ url(r"^(net|ix|fac|org|asn)/translate$", request_translation),
+    url(r"^suggest/(?P<reftag>fac)$", view_suggest),
+ url(r"^maintenance$", view_maintenance, name="maintenance"),
]
# o
# REST API
urlpatterns += [
- url(r'^api/', include(peeringdb_server.rest.urls)),
- url(r'^api-auth/',
- include('rest_framework.urls', namespace='rest_framework')),
- url(r'^apidocs/', get_swagger_view(title="PeeringDB API")),
+ url(r"^api/", include(peeringdb_server.rest.urls)),
+ url(r"^api-auth/", include("rest_framework.urls", namespace="rest_framework")),
+ url(r"^apidocs/", get_swagger_view(title="PeeringDB API")),
]
# AUTOCOMPLETE
urlpatterns += [
-    url(r'^autocomplete/fac/net/(?P<net_id>\d+)/$',
-        FacilityAutocompleteForNetwork.as_view(), name="autocomplete-fac-net"),
-    url(r'^autocomplete/fac/ix/(?P<ix_id>\d+)/$',
-        FacilityAutocompleteForExchange.as_view(), name="autocomplete-fac-ix"),
- url(r'^autocomplete/org/$', OrganizationAutocomplete.as_view(),
- name="autocomplete-org"),
- url(r'^autocomplete/ix/json$', ExchangeAutocompleteJSON.as_view(),
- name="autocomplete-ix-json"),
- url(r'^autocomplete/ix$', ExchangeAutocomplete.as_view(),
- name="autocomplete-ix"),
- url(r'^autocomplete/fac/json$', FacilityAutocompleteJSON.as_view(),
- name="autocomplete-fac-json"),
- url(r'^autocomplete/fac$', FacilityAutocomplete.as_view(),
- name="autocomplete-fac"),
- url(r'^autocomplete/ixlan/$', IXLanAutocomplete.as_view(),
- name="autocomplete-ixlan"),
- url(r'^autocomplete/admin/deletedversions$', DeletedVersionAutocomplete.as_view(),
- name="autocomplete-admin-deleted-versions"),
+ url(
+        r"^autocomplete/fac/net/(?P<net_id>\d+)/$",
+ FacilityAutocompleteForNetwork.as_view(),
+ name="autocomplete-fac-net",
+ ),
+ url(
+        r"^autocomplete/fac/ix/(?P<ix_id>\d+)/$",
+ FacilityAutocompleteForExchange.as_view(),
+ name="autocomplete-fac-ix",
+ ),
+ url(
+ r"^autocomplete/org/$",
+ OrganizationAutocomplete.as_view(),
+ name="autocomplete-org",
+ ),
+ url(
+ r"^autocomplete/ix/json$",
+ ExchangeAutocompleteJSON.as_view(),
+ name="autocomplete-ix-json",
+ ),
+ url(r"^autocomplete/ix$", ExchangeAutocomplete.as_view(), name="autocomplete-ix"),
+ url(
+ r"^autocomplete/fac/json$",
+ FacilityAutocompleteJSON.as_view(),
+ name="autocomplete-fac-json",
+ ),
+ url(r"^autocomplete/fac$", FacilityAutocomplete.as_view(), name="autocomplete-fac"),
+ url(
+ r"^autocomplete/ixlan/$", IXLanAutocomplete.as_view(), name="autocomplete-ixlan"
+ ),
+ url(
+ r"^autocomplete/admin/deletedversions$",
+ DeletedVersionAutocomplete.as_view(),
+ name="autocomplete-admin-deleted-versions",
+ ),
]
# Admin autocomplete for commandlinetool history
urlpatterns += [
- url(r'^autocomplete/admin/clt-history/{}/$'.format(tool_id),
+ url(
+ r"^autocomplete/admin/clt-history/{}/$".format(tool_id),
ToolHistory.as_view(),
- name="autocomplete-admin-clt-history-{}".format(tool_id))
+ name="autocomplete-admin-clt-history-{}".format(tool_id),
+ )
for tool_id, ToolHistory in clt_history.items()
]
# Oauth2
urlpatterns += [
- url(r'^oauth2/',
- include('oauth2_provider.urls', namespace='oauth2_provider')),
+ url(r"^oauth2/", include("oauth2_provider.urls", namespace="oauth2_provider")),
]
# DEBUG
if settings.DEBUG:
import debug_toolbar
- urlpatterns = [
- url(r'^__debug__/', include(debug_toolbar.urls)),
- ] + urlpatterns
+
+ urlpatterns = [url(r"^__debug__/", include(debug_toolbar.urls)),] + urlpatterns
diff --git a/peeringdb_server/validators.py b/peeringdb_server/validators.py
index 581b9e73..91208488 100644
--- a/peeringdb_server/validators.py
+++ b/peeringdb_server/validators.py
@@ -11,6 +11,7 @@
from peeringdb_server.inet import network_is_pdb_valid
import peeringdb_server.models
+
def validate_prefix(prefix):
"""
validate ip prefix
@@ -49,21 +50,30 @@ def validate_address_space(prefix):
if not network_is_pdb_valid(prefix):
raise ValidationError(_("Address space invalid: {}").format(prefix))
- prefixlen_min = getattr(settings, "DATA_QUALITY_MIN_PREFIXLEN_V{}".format(prefix.version))
- prefixlen_max = getattr(settings, "DATA_QUALITY_MAX_PREFIXLEN_V{}".format(prefix.version))
+ prefixlen_min = getattr(
+ settings, "DATA_QUALITY_MIN_PREFIXLEN_V{}".format(prefix.version)
+ )
+ prefixlen_max = getattr(
+ settings, "DATA_QUALITY_MAX_PREFIXLEN_V{}".format(prefix.version)
+ )
if prefix.prefixlen < prefixlen_min:
raise ValidationError(
- _("Maximum allowed prefix length is {}").format(prefixlen_min))
+ _("Maximum allowed prefix length is {}").format(prefixlen_min)
+ )
elif prefix.prefixlen > prefixlen_max:
raise ValidationError(
- _("Minimum allowed prefix length is {}").format(prefixlen_max))
+ _("Minimum allowed prefix length is {}").format(prefixlen_max)
+ )
+
def validate_info_prefixes4(value):
if value > settings.DATA_QUALITY_MAX_PREFIX_V4_LIMIT:
raise ValidationError(
_("Maximum value allowed {}").format(
- settings.DATA_QUALITY_MAX_PREFIX_V4_LIMIT))
+ settings.DATA_QUALITY_MAX_PREFIX_V4_LIMIT
+ )
+ )
if value < 0:
raise ValidationError(_("Negative value not allowed"))
@@ -72,7 +82,9 @@ def validate_info_prefixes6(value):
if value > settings.DATA_QUALITY_MAX_PREFIX_V6_LIMIT:
raise ValidationError(
_("Maximum value allowed {}").format(
- settings.DATA_QUALITY_MAX_PREFIX_V6_LIMIT))
+ settings.DATA_QUALITY_MAX_PREFIX_V6_LIMIT
+ )
+ )
if value < 0:
raise ValidationError(_("Negative value not allowed"))
@@ -90,15 +102,17 @@ def validate_prefix_overlap(prefix):
- ValidationError on failed validation
"""
-
prefix = validate_prefix(prefix)
- qs = peeringdb_server.models.IXLanPrefix.objects.filter(protocol="IPv{}".format(prefix.version),
- status="ok")
+ qs = peeringdb_server.models.IXLanPrefix.objects.filter(
+ protocol="IPv{}".format(prefix.version), status="ok"
+ )
qs = qs.exclude(prefix=prefix)
for ixpfx in qs:
if ixpfx.prefix.overlaps(prefix):
- raise ValidationError(_("Prefix overlaps with {}'s prefix: {}".format(
- ixpfx.ixlan.ix.name, ixpfx.prefix
- )))
-
-
+ raise ValidationError(
+ _(
+ "Prefix overlaps with {}'s prefix: {}".format(
+ ixpfx.ixlan.ix.name, ixpfx.prefix
+ )
+ )
+ )
diff --git a/peeringdb_server/views.py b/peeringdb_server/views.py
index 765e8046..fc098be1 100644
--- a/peeringdb_server/views.py
+++ b/peeringdb_server/views.py
@@ -5,9 +5,14 @@
import uuid
from allauth.account.models import EmailAddress
-from django.http import (JsonResponse, HttpResponse, HttpResponseRedirect,
- HttpResponseNotFound, HttpResponseBadRequest,
- HttpResponseForbidden)
+from django.http import (
+ JsonResponse,
+ HttpResponse,
+ HttpResponseRedirect,
+ HttpResponseNotFound,
+ HttpResponseBadRequest,
+ HttpResponseForbidden,
+)
from django.conf import settings as dj_settings
from django.shortcuts import render
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
@@ -40,15 +45,36 @@
from peeringdb_server.org_admin_views import load_all_user_permissions
from peeringdb_server.data_views import BOOL_CHOICE
from peeringdb_server.models import (
- UserOrgAffiliationRequest, User, UserPasswordReset, Organization, Network,
- NetworkFacility, NetworkIXLan, InternetExchange, InternetExchangeFacility,
- Facility, Sponsorship, Partnership, PARTNERSHIP_LEVELS, REFTAG_MAP, UTC)
-from peeringdb_server.forms import (UserCreationForm, PasswordResetForm,
- PasswordChangeForm, AffiliateToOrgForm,
- UsernameRetrieveForm, UserLocaleForm)
+ UserOrgAffiliationRequest,
+ User,
+ UserPasswordReset,
+ Organization,
+ Network,
+ NetworkFacility,
+ NetworkIXLan,
+ InternetExchange,
+ InternetExchangeFacility,
+ Facility,
+ Sponsorship,
+ Partnership,
+ PARTNERSHIP_LEVELS,
+ REFTAG_MAP,
+ UTC,
+)
+from peeringdb_server.forms import (
+ UserCreationForm,
+ PasswordResetForm,
+ PasswordChangeForm,
+ AffiliateToOrgForm,
+ UsernameRetrieveForm,
+ UserLocaleForm,
+)
from peeringdb_server.serializers import (
- OrganizationSerializer, NetworkSerializer, InternetExchangeSerializer,
- FacilitySerializer)
+ OrganizationSerializer,
+ NetworkSerializer,
+ InternetExchangeSerializer,
+ FacilitySerializer,
+)
from peeringdb_server.inet import RdapLookup, RdapException
from peeringdb_server.mail import mail_username_retrieve
from peeringdb_server.deskpro import ticket_queue_rdap_error
@@ -60,14 +86,15 @@
RATELIMITS = dj_settings.RATELIMITS
from django.utils.translation import ugettext_lazy as _
+
# lazy init for translations
-#_ = lambda s: s
+# _ = lambda s: s
BASE_ENV = {
- 'RECAPTCHA_PUBLIC_KEY': dj_settings.RECAPTCHA_PUBLIC_KEY,
- 'OAUTH_ENABLED': dj_settings.OAUTH_ENABLED,
- 'PEERINGDB_VERSION': settings.PEERINGDB_VERSION,
- 'TUTORIAL_MODE': settings.TUTORIAL_MODE
+ "RECAPTCHA_PUBLIC_KEY": dj_settings.RECAPTCHA_PUBLIC_KEY,
+ "OAUTH_ENABLED": dj_settings.OAUTH_ENABLED,
+ "PEERINGDB_VERSION": settings.PEERINGDB_VERSION,
+ "TUTORIAL_MODE": settings.TUTORIAL_MODE,
}
@@ -90,7 +117,7 @@ def export_permissions(user, entity):
perms = {
"can_write": has_perms(user, entity, PERM_WRITE),
"can_create": has_perms(user, entity, PERM_CREATE),
- "can_delete": has_perms(user, entity, PERM_DELETE)
+ "can_delete": has_perms(user, entity, PERM_DELETE),
}
if entity.status == "pending":
@@ -101,8 +128,7 @@ def export_permissions(user, entity):
perms["can_edit"] = True
if hasattr(entity, "nsp_namespace_manage"):
- perms["can_manage"] = has_perms(user, entity.nsp_namespace_manage,
- PERM_CRUD)
+ perms["can_manage"] = has_perms(user, entity.nsp_namespace_manage, PERM_CRUD)
else:
perms["can_manage"] = False
@@ -123,35 +149,33 @@ def all(self):
def make_env(**data):
env = {}
env.update(**BASE_ENV)
- env.update(**{'global_stats': global_stats()})
+ env.update(**{"global_stats": global_stats()})
env.update(**data)
return env
-
def view_http_error_404(request):
- template = loader.get_template('site/error_404.html')
+ template = loader.get_template("site/error_404.html")
return HttpResponseNotFound(template.render(make_env(), request))
def view_http_error_403(request):
- template = loader.get_template('site/error_403.html')
+ template = loader.get_template("site/error_403.html")
return HttpResponseForbidden(template.render(make_env(), request))
def view_http_error_csrf(request, reason):
return JsonResponse({"non_field_errors": [reason]}, status=403)
+
def view_maintenance(request):
- template = loader.get_template('site/maintenance.html')
+ template = loader.get_template("site/maintenance.html")
return HttpResponse(template.render({}, request), status=503)
@login_required
-@ratelimit(key="ip", rate=RATELIMITS["view_request_ownership_GET"],
- method="GET")
-@ratelimit(key="ip", rate=RATELIMITS["view_request_ownership_POST"],
- method="POST")
+@ratelimit(key="ip", rate=RATELIMITS["view_request_ownership_GET"], method="GET")
+@ratelimit(key="ip", rate=RATELIMITS["view_request_ownership_POST"], method="POST")
def view_request_ownership(request):
"""
Renders the form that allows users to request ownership
@@ -164,9 +188,10 @@ def view_request_ownership(request):
# check if reuqest was blocked by rate limiting
if was_limited:
- return view_index(request, errors=[
- _("Please wait a bit before requesting ownership again.")
- ])
+ return view_index(
+ request,
+ errors=[_("Please wait a bit before requesting ownership again.")],
+ )
org_id = request.GET.get("id")
try:
@@ -175,12 +200,13 @@ def view_request_ownership(request):
return view_index(request, errors=[_("Invalid organization")])
if org.owned:
- return view_index(request, errors=[
- _(u"Organization '%(org_name)s' is already under ownership") %
- {
- 'org_name': org.name
- }
- ])
+ return view_index(
+ request,
+ errors=[
+ _(u"Organization '%(org_name)s' is already under ownership")
+ % {"org_name": org.name}
+ ],
+ )
template = loader.get_template("site/request-ownership.html")
return HttpResponse(template.render(make_env(org=org), request))
@@ -191,47 +217,56 @@ def view_request_ownership(request):
# check if reuqest was blocked by rate limiting
if was_limited:
- return JsonResponse({
- "non_field_errors": [
- _("Please wait a bit before requesting ownership again.")
- ]
- }, status=400)
+ return JsonResponse(
+ {
+ "non_field_errors": [
+ _("Please wait a bit before requesting ownership again.")
+ ]
+ },
+ status=400,
+ )
try:
org = Organization.objects.get(id=org_id)
except Organization.DoesNotExist:
- return JsonResponse({
- "non_field_errors": [_("Organization does not exist")]
- }, status=400)
+ return JsonResponse(
+ {"non_field_errors": [_("Organization does not exist")]}, status=400
+ )
if org.owned:
- return JsonResponse({
- "non_field_errors": [
- _("Organization '%(org_name)s' is already under ownership")
- % {
- 'org_name': org.name
- }
- ]
- }, status=400)
-
- if UserOrgAffiliationRequest.objects.filter(user=request.user,
- org=org).exists():
- return JsonResponse({
- "non_field_errors": [
- _("You already have an ownership request pending for this organization"
- )
- ]
- }, status=400)
+ return JsonResponse(
+ {
+ "non_field_errors": [
+ _("Organization '%(org_name)s' is already under ownership")
+ % {"org_name": org.name}
+ ]
+ },
+ status=400,
+ )
+
+ if UserOrgAffiliationRequest.objects.filter(
+ user=request.user, org=org
+ ).exists():
+ return JsonResponse(
+ {
+ "non_field_errors": [
+ _(
+ "You already have an ownership request pending for this organization"
+ )
+ ]
+ },
+ status=400,
+ )
uoar = UserOrgAffiliationRequest.objects.create(
- user=request.user, org=org, status="pending")
+ user=request.user, org=org, status="pending"
+ )
return JsonResponse({"status": "ok", "ownership_status": uoar.status})
@csrf_protect
@ensure_csrf_cookie
-@ratelimit(key="ip", method="POST",
- rate=RATELIMITS["view_affiliate_to_org_POST"])
+@ratelimit(key="ip", method="POST", rate=RATELIMITS["view_affiliate_to_org_POST"])
def view_affiliate_to_org(request):
"""
Allows the user to request affiliation with an organization through
@@ -246,42 +281,54 @@ def view_affiliate_to_org(request):
# check if request was blocked by rate limiting
was_limited = getattr(request, "limited", False)
if was_limited:
- return JsonResponse({
- "non_field_errors": [
- _("Please wait a bit before requesting affiliation again.")
- ]
- }, status=400)
+ return JsonResponse(
+ {
+ "non_field_errors": [
+ _("Please wait a bit before requesting affiliation again.")
+ ]
+ },
+ status=400,
+ )
form = AffiliateToOrgForm(request.POST)
if not form.is_valid():
return JsonResponse(form.errors, status=400)
- if not form.cleaned_data.get("org") and not form.cleaned_data.get(
- "asn") and not form.cleaned_data.get("org_name"):
- return JsonResponse({
- "asn": _("Either ASN or Organization required"),
- "org": _("Either ASN or Organization required")
- }, status=400)
+ if (
+ not form.cleaned_data.get("org")
+ and not form.cleaned_data.get("asn")
+ and not form.cleaned_data.get("org_name")
+ ):
+ return JsonResponse(
+ {
+ "asn": _("Either ASN or Organization required"),
+ "org": _("Either ASN or Organization required"),
+ },
+ status=400,
+ )
asn = form.cleaned_data.get("asn")
# remove all deleted uoars for user
- UserOrgAffiliationRequest.objects.filter(user=request.user,
- status="denied").delete()
+ UserOrgAffiliationRequest.objects.filter(
+ user=request.user, status="denied"
+ ).delete()
try:
uoar, created = UserOrgAffiliationRequest.objects.get_or_create(
- user=request.user, asn=form.cleaned_data.get("asn"),
+ user=request.user,
+ asn=form.cleaned_data.get("asn"),
org_id=form.cleaned_data.get("org") or None,
org_name=form.cleaned_data.get("org_name") or None,
- status="pending")
+ status="pending",
+ )
except RdapException as exc:
ticket_queue_rdap_error(request.user, asn, exc)
- return JsonResponse({
- "asn": _("RDAP Lookup Error: {}").format(exc)
- }, status=400)
+ return JsonResponse(
+ {"asn": _("RDAP Lookup Error: {}").format(exc)}, status=400
+ )
except MultipleObjectsReturned:
pass
@@ -293,21 +340,24 @@ def view_affiliate_to_org(request):
@csrf_protect
@ensure_csrf_cookie
-@ratelimit(key='ip', rate=RATELIMITS["resend_confirmation_mail"])
+@ratelimit(key="ip", rate=RATELIMITS["resend_confirmation_mail"])
def resend_confirmation_mail(request):
- was_limited = getattr(request, 'limited', False)
+ was_limited = getattr(request, "limited", False)
if was_limited:
- return view_index(request, errors=[
- _("Please wait a bit before trying to resend the confirmation email again"
- )
- ])
+ return view_index(
+ request,
+ errors=[
+ _(
+ "Please wait a bit before trying to resend the confirmation email again"
+ )
+ ],
+ )
if not request.user.is_authenticated():
return view_login(request)
request.user.send_email_confirmation(request=request)
- return view_index(request,
- errors=[_("We have resent your confirmation email")])
+ return view_index(request, errors=[_("We have resent your confirmation email")])
@csrf_protect
@@ -335,24 +385,24 @@ def view_set_user_locale(request):
request.user.set_locale(loc)
from django.utils import translation
+
translation.activate(loc)
request.session[translation.LANGUAGE_SESSION_KEY] = loc
return JsonResponse({"status": "ok"})
-@protected_resource(scopes=['profile'])
+@protected_resource(scopes=["profile"])
def view_profile_v1(request):
# if not request.user.is_authenticated():
# return view_login(request)
oauth = get_oauthlib_core()
scope_email, _request = oauth.verify_request(request, scopes=["email"])
- scope_networks, _request = oauth.verify_request(request,
- scopes=["networks"])
+ scope_networks, _request = oauth.verify_request(request, scopes=["networks"])
json_params = {}
if "pretty" in request.GET:
- json_params['indent'] = 2
+ json_params["indent"] = 2
user = request.user
data = dict(
@@ -366,34 +416,25 @@ def view_profile_v1(request):
# only add email fields if email scope is present
if scope_email:
data.update(
- dict(
- email=request.user.email,
- verified_email=user.email_confirmed,
- ))
+ dict(email=request.user.email, verified_email=user.email_confirmed,)
+ )
# only add ddnetworks if networks scope is present
if scope_networks:
networks = []
load_perms(user)
for net in user.networks:
- crud = get_perms(user._nsp_perms_struct,
- net.nsp_namespace.split(".")).value
- networks.append(
- dict(
- id=net.id,
- name=net.name,
- asn=net.asn,
- perms=crud,
- ))
-
- data['networks'] = networks
+ crud = get_perms(user._nsp_perms_struct, net.nsp_namespace.split(".")).value
+ networks.append(dict(id=net.id, name=net.name, asn=net.asn, perms=crud,))
+
+ data["networks"] = networks
return JsonResponse(data, json_dumps_params=json_params)
@csrf_protect
@ensure_csrf_cookie
-@ratelimit(key='ip', rate=RATELIMITS["view_verify_POST"], method="POST")
+@ratelimit(key="ip", rate=RATELIMITS["view_verify_POST"], method="POST")
def view_verify(request):
if not request.user.is_authenticated():
@@ -402,12 +443,15 @@ def view_verify(request):
if request.method in ["GET", "HEAD"]:
template = loader.get_template("site/verify.html")
env = BASE_ENV.copy()
- env.update({
- 'affiliation_request':
- request.user.affiliation_requests.order_by("-created").first(),
- 'affiliations': request.user.organizations,
- 'global_stats': global_stats(),
- })
+ env.update(
+ {
+ "affiliation_request": request.user.affiliation_requests.order_by(
+ "-created"
+ ).first(),
+ "affiliations": request.user.organizations,
+ "global_stats": global_stats(),
+ }
+ )
return HttpResponse(template.render(env, request))
elif request.method == "POST":
@@ -418,16 +462,17 @@ def view_verify(request):
was_limited = getattr(request, "limited", False)
if was_limited:
- return JsonResponse({
- "non_field_errors": [
- _("Please wait a bit before requesting another email change"
- )
- ]
- }, status=400)
+ return JsonResponse(
+ {
+ "non_field_errors": [
+ _("Please wait a bit before requesting another email change")
+ ]
+ },
+ status=400,
+ )
if not request.user.has_oauth:
- if not authenticate(username=request.user.username,
- password=password):
+ if not authenticate(username=request.user.username, password=password):
return JsonResponse({"status": "auth"}, status=401)
if EmailAddress.objects.filter(user=request.user).exists():
@@ -435,11 +480,14 @@ def view_verify(request):
request.user.email = request.POST.get("email")
- if User.objects.filter(email=request.user.email).exclude(
- id=request.user.id).exists():
- return JsonResponse({
- "email": _("E-mail already exists in our system")
- }, status=400)
+ if (
+ User.objects.filter(email=request.user.email)
+ .exclude(id=request.user.id)
+ .exists()
+ ):
+ return JsonResponse(
+ {"email": _("E-mail already exists in our system")}, status=400
+ )
request.user.clean()
request.user.save()
@@ -462,12 +510,10 @@ def view_password_change(request):
password_c = request.POST.get("password_c")
if not request.user.has_oauth:
- if not authenticate(username=request.user.username,
- password=password_c):
- return JsonResponse({
- "status": "auth",
- "password_c": _("Wrong password")
- }, status=400)
+ if not authenticate(username=request.user.username, password=password_c):
+ return JsonResponse(
+ {"status": "auth", "password_c": _("Wrong password")}, status=400
+ )
else:
return JsonResponse({"status": "auth"}, status=401)
@@ -488,16 +534,16 @@ def view_username_retrieve(request):
username retrieval view
"""
env = BASE_ENV.copy()
- env.update({
- 'global_stats': global_stats(),
- })
+ env.update(
+ {"global_stats": global_stats(),}
+ )
return render(request, "site/username-retrieve.html", env)
@csrf_protect
@ensure_csrf_cookie
@require_http_methods(["POST"])
-@ratelimit(key='ip', rate=RATELIMITS["view_username_retrieve_initiate"])
+@ratelimit(key="ip", rate=RATELIMITS["view_username_retrieve_initiate"])
def view_username_retrieve_initiate(request):
"""
username retrieval initiate view
@@ -505,11 +551,14 @@ def view_username_retrieve_initiate(request):
was_limited = getattr(request, "limited", False)
if was_limited:
- return JsonResponse({
- "non_field_errors": [
- _("Please wait a bit before requesting your usernames again.")
- ]
- }, status=400)
+ return JsonResponse(
+ {
+ "non_field_errors": [
+ _("Please wait a bit before requesting your usernames again.")
+ ]
+ },
+ status=400,
+ )
# clean form and get email address
form = UsernameRetrieveForm(request.POST)
@@ -544,12 +593,14 @@ def view_username_retrieve_complete(request):
secret_expected = request.session.get("username_retrieve_secret")
email = request.session.get("username_retrieve_email")
env = BASE_ENV.copy()
- env.update({
- "secret": secret,
- "secret_expected": secret_expected,
- "users": User.objects.filter(email=email),
- "email": email
- })
+ env.update(
+ {
+ "secret": secret,
+ "secret_expected": secret_expected,
+ "users": User.objects.filter(email=email),
+ "email": email,
+ }
+ )
if secret_expected and constant_time_compare(secret, secret_expected):
# invalidate the username retrieve session
@@ -569,9 +620,9 @@ def view_password_reset(request):
if request.method in ["GET", "HEAD"]:
env = BASE_ENV.copy()
- env.update({
- 'global_stats': global_stats(),
- })
+ env.update(
+ {"global_stats": global_stats(),}
+ )
env["token"] = token = request.GET.get("token")
env["target"] = target = request.GET.get("target")
@@ -601,33 +652,30 @@ def view_password_reset(request):
err_invalid_token_msg = _("Invalid Security Token")
err_expired_msg = ('{} {}').format(
- _("Password Reset Process has expired, please"),
- _("initiate again"))
+ _("Password Reset Process has expired, please"), _("initiate again")
+ )
if user:
try:
if not user.password_reset.match(token):
- return JsonResponse({
- "non_field_errors": [err_invalid_token_msg]
- }, status=400)
+ return JsonResponse(
+ {"non_field_errors": [err_invalid_token_msg]}, status=400
+ )
if not user.password_reset.is_valid():
- return JsonResponse({
- "non_field_errors": [err_expired_msg]
- }, status=400)
+ return JsonResponse(
+ {"non_field_errors": [err_expired_msg]}, status=400
+ )
except UserPasswordReset.DoesNotExist:
- return JsonResponse({
- "non_field_errors": [err_expired_msg]
- }, status=400)
+ return JsonResponse(
+ {"non_field_errors": [err_expired_msg]}, status=400
+ )
- user.password_reset_complete(token,
- form.cleaned_data.get("password"))
+ user.password_reset_complete(token, form.cleaned_data.get("password"))
else:
- return JsonResponse({
- "non_field_errors": [err_expired_msg]
- }, status=400)
+ return JsonResponse({"non_field_errors": [err_expired_msg]}, status=400)
else:
form = PasswordResetForm(request.POST)
@@ -635,8 +683,7 @@ def view_password_reset(request):
if not form.is_valid():
return JsonResponse(form.errors, status=400)
- user = User.objects.filter(
- email=form.cleaned_data["email"]).first()
+ user = User.objects.filter(email=form.cleaned_data["email"]).first()
if user:
user.password_reset_initiate()
return JsonResponse({"status": "ok"})
@@ -649,18 +696,21 @@ def view_registration(request):
user registration page view
"""
if request.user.is_authenticated():
- return view_index(request, errors=[
- _('Please log out of your current session before trying to register. Notice, multiple accounts are no longer needed.'
- )
- ])
+ return view_index(
+ request,
+ errors=[
+ _(
+ "Please log out of your current session before trying to register. Notice, multiple accounts are no longer needed."
+ )
+ ],
+ )
if request.method in ["GET", "HEAD"]:
template = loader.get_template("site/register.html")
env = BASE_ENV.copy()
- env.update({
- 'global_stats': global_stats(),
- 'register_form' : UserCreationForm(),
- })
+ env.update(
+ {"global_stats": global_stats(), "register_form": UserCreationForm(),}
+ )
return HttpResponse(template.render(env, request))
elif request.method == "POST":
@@ -669,21 +719,21 @@ def view_registration(request):
if not form.is_valid():
errors = form.errors
- errors["non_field_errors"] = errors.get("__all__",[])
+ errors["non_field_errors"] = errors.get("__all__", [])
return JsonResponse(errors, status=400)
email = form.cleaned_data["email"]
if EmailAddress.objects.filter(email=email).count() > 0:
- return JsonResponse({
- "email": _("This email address has already been used")
- }, status=400)
+ return JsonResponse(
+ {"email": _("This email address has already been used")}, status=400
+ )
# require min password length
# FIXME: impl password strength validation
if len(form.cleaned_data["password1"]) < 10:
- return JsonResponse({
- "password1": _("Needs to be at least 10 characters long")
- }, status=400)
+ return JsonResponse(
+ {"password1": _("Needs to be at least 10 characters long")}, status=400
+ )
# create the user
user = form.save()
@@ -691,9 +741,12 @@ def view_registration(request):
user.set_unverified()
# log the user in
- login(request,
- authenticate(username=request.POST["username"],
- password=request.POST["password1"]))
+ login(
+ request,
+ authenticate(
+ username=request.POST["username"], password=request.POST["password1"]
+ ),
+ )
user.send_email_confirmation(signup=True, request=request)
@@ -711,14 +764,14 @@ def view_login(request, errors=None):
errors = []
if request.user.is_authenticated():
- return view_index(request, errors=[_('Already logged in')])
+ return view_index(request, errors=[_("Already logged in")])
- template = loader.get_template('site/login.html')
+ template = loader.get_template("site/login.html")
redir = request.GET.get("next", request.POST.get("next"))
env = BASE_ENV.copy()
- env.update({'errors': errors, "next": redir})
+ env.update({"errors": errors, "next": redir})
return HttpResponse(template.render(env, request))
@@ -730,48 +783,46 @@ def view_index(request, errors=None):
if not errors:
errors = []
- template = loader.get_template('site/index.html')
+ template = loader.get_template("site/index.html")
recent = {
"net": Network.handleref.filter(status="ok").order_by("-updated")[:5],
"fac": Facility.handleref.filter(status="ok").order_by("-updated")[:5],
- "ix": InternetExchange.handleref.filter(status="ok")
- .order_by("-updated")[:5]
+ "ix": InternetExchange.handleref.filter(status="ok").order_by("-updated")[:5],
}
env = BASE_ENV.copy()
- env.update({
- 'errors': errors,
- 'global_stats': global_stats(),
- 'recent': recent
- })
+ env.update({"errors": errors, "global_stats": global_stats(), "recent": recent})
return HttpResponse(template.render(env, request))
-def view_component(request, component, data, title, perms=None, instance=None,
- **kwargs):
+def view_component(
+ request, component, data, title, perms=None, instance=None, **kwargs
+):
"""
Generic component view
"""
if not perms:
perms = {}
- template = loader.get_template('site/view.html')
+ template = loader.get_template("site/view.html")
env = BASE_ENV.copy()
- env.update({
- 'data': data,
- 'permissions': perms,
- 'title': title,
- 'component': component,
- 'instance': instance,
- 'ref_tag': instance._handleref.tag,
- 'global_stats': global_stats(),
- 'asset_template_name': 'site/view_%s_assets.html' % component,
- 'tools_template_name': 'site/view_%s_tools.html' % component,
- 'side_template_name': 'site/view_%s_side.html' % component,
- 'bottom_template_name': 'site/view_%s_bottom.html' % component
- })
+ env.update(
+ {
+ "data": data,
+ "permissions": perms,
+ "title": title,
+ "component": component,
+ "instance": instance,
+ "ref_tag": instance._handleref.tag,
+ "global_stats": global_stats(),
+ "asset_template_name": "site/view_%s_assets.html" % component,
+ "tools_template_name": "site/view_%s_tools.html" % component,
+ "side_template_name": "site/view_%s_side.html" % component,
+ "bottom_template_name": "site/view_%s_bottom.html" % component,
+ }
+ )
env.update(**kwargs)
return HttpResponse(template.render(env, request))
@@ -784,8 +835,8 @@ def view_organization(request, id):
try:
org = OrganizationSerializer.prefetch_related(
- Organization.objects, request, depth=2).get(
- id=id, status__in=["ok", "pending"])
+ Organization.objects, request, depth=2
+ ).get(id=id, status__in=["ok", "pending"])
except ObjectDoesNotExist:
return view_http_error_404(request)
@@ -799,14 +850,14 @@ def view_organization(request, id):
tags = ["fac", "net", "ix"]
for tag in tags:
model = REFTAG_MAP.get(tag)
- perms["can_create_%s" % tag] = has_perms(request.user,
- model.nsp_namespace_from_id(
- org.id, "create"),
- PERM_CREATE)
- perms["can_delete_%s" % tag] = has_perms(request.user,
- model.nsp_namespace_from_id(
- org.id, "_").strip("_"),
- PERM_DELETE)
+ perms["can_create_%s" % tag] = has_perms(
+ request.user, model.nsp_namespace_from_id(org.id, "create"), PERM_CREATE
+ )
+ perms["can_delete_%s" % tag] = has_perms(
+ request.user,
+ model.nsp_namespace_from_id(org.id, "_").strip("_"),
+ PERM_DELETE,
+ )
# if the organization being viewed is the one used
# to store suggested entities, we dont want to show the editorial
@@ -843,50 +894,56 @@ def view_organization(request, id):
"exchanges": exchanges,
"networks": networks,
"facilities": facilities,
- "fields": [{
- "name": "website",
- "type": "url",
- "notify_incomplete": True,
- "value": data.get("website", dismiss),
- "label": _("Website")
- }, {
- "name": "address1",
- "label": _("Address 1"),
- "notify_incomplete": True,
- "value": data.get("address1", dismiss)
- }, {
- "name": "address2",
- "label": _("Address 2"),
- "value": data.get("address2", dismiss)
- }, {
- "name": "location",
- "label": _("Location"),
- "type": "location",
- "notify_incomplete": True,
- "value": data
- }, {
- "name": "country",
- "type": "list",
- "data": "countries_b",
- "label": _("Country Code"),
- "notify_incomplete": True,
- "value": data.get("country", dismiss)
- }, {
- "name": "notes",
- "label": _("Notes"),
- "help_text": _("Markdown enabled"),
- "type": "fmt-text",
- "value": data.get("notes", dismiss)
- }]
+ "fields": [
+ {
+ "name": "website",
+ "type": "url",
+ "notify_incomplete": True,
+ "value": data.get("website", dismiss),
+ "label": _("Website"),
+ },
+ {
+ "name": "address1",
+ "label": _("Address 1"),
+ "notify_incomplete": True,
+ "value": data.get("address1", dismiss),
+ },
+ {
+ "name": "address2",
+ "label": _("Address 2"),
+ "value": data.get("address2", dismiss),
+ },
+ {
+ "name": "location",
+ "label": _("Location"),
+ "type": "location",
+ "notify_incomplete": True,
+ "value": data,
+ },
+ {
+ "name": "country",
+ "type": "list",
+ "data": "countries_b",
+ "label": _("Country Code"),
+ "notify_incomplete": True,
+ "value": data.get("country", dismiss),
+ },
+ {
+ "name": "notes",
+ "label": _("Notes"),
+ "help_text": _("Markdown enabled"),
+ "type": "fmt-text",
+ "value": data.get("notes", dismiss),
+ },
+ ],
}
users = {}
if perms.get("can_manage"):
users.update(
- dict([(user.id, user)
- for user in org.admin_usergroup.user_set.all()]))
- users.update(
- dict([(user.id, user) for user in org.usergroup.user_set.all()]))
+ dict([(user.id, user) for user in org.admin_usergroup.user_set.all()])
+ )
+ users.update(dict([(user.id, user) for user in org.usergroup.user_set.all()]))
users = sorted(users.values(), key=lambda x: x.full_name)
# if user has rights to create sub entties or manage users, allow them
@@ -910,9 +967,16 @@ def view_organization(request, id):
tab_init = {"users": "active"}
return view_component(
- request, "organization", data, "Organization", tab_init=tab_init,
- users=users, user_perms=load_all_user_permissions(org), instance=org,
- perms=perms)
+ request,
+ "organization",
+ data,
+ "Organization",
+ tab_init=tab_init,
+ users=users,
+ user_perms=load_all_user_permissions(org),
+ instance=org,
+ perms=perms,
+ )
@ensure_csrf_cookie
@@ -933,14 +997,21 @@ def view_facility(request, id):
perms = export_permissions(request.user, facility)
- org = OrganizationSerializer(facility.org, context={
- "user": request.user
- }).data
+ org = OrganizationSerializer(facility.org, context={"user": request.user}).data
- exchanges = InternetExchangeFacility.handleref.undeleted().filter(
- facility=facility).select_related("ix").order_by("ix__name").all()
- peers = NetworkFacility.handleref.undeleted().filter(
- facility=facility).select_related("network").order_by("network__name")
+ exchanges = (
+ InternetExchangeFacility.handleref.undeleted()
+ .filter(facility=facility)
+ .select_related("ix")
+ .order_by("ix__name")
+ .all()
+ )
+ peers = (
+ NetworkFacility.handleref.undeleted()
+ .filter(facility=facility)
+ .select_related("network")
+ .order_by("network__name")
+ )
dismiss = DoNotRender()
@@ -948,60 +1019,72 @@ def view_facility(request, id):
"title": data.get("name", dismiss),
"exchanges": exchanges,
"peers": peers,
- "fields": [{
- "name": "org",
- "label": _("Organization"),
- "value": org.get("name", dismiss),
- "type": "entity_link",
- "link": "/%s/%d" % (Organization._handleref.tag, org.get("id"))
- }, {
- "name": "website",
- "type": "url",
- "value": data.get("website", dismiss),
- "label": _("Website")
- }, {
- "name": "address1",
- "label": _("Address 1"),
- "value": data.get("address1", dismiss)
- }, {
- "name": "address2",
- "label": _("Address 2"),
- "value": data.get("address2", dismiss)
- }, {
- "name": "location",
- "label": _("Location"),
- "type": "location",
- "value": data
- }, {
- "name": "country",
- "type": "list",
- "data": "countries_b",
- "label": _("Country Code"),
- "value": data.get("country", dismiss)
- }, {
- "name": "geocode",
- "label": _("Geocode"),
- "type": "geocode",
- "value": data
- }, {
- "name": "clli",
- "label": _("CLLI Code"),
- "value": data.get("clli", dismiss)
- }, {
- "name": "npanxx",
- "label": _("NPA-NXX"),
- "value": data.get("npanxx", dismiss)
- }, {
- "name": "notes",
- "label": _("Notes"),
- "help_text": _("Markdown enabled"),
- "type": "fmt-text",
- "value": data.get("notes", dismiss)
- }]
+ "fields": [
+ {
+ "name": "org",
+ "label": _("Organization"),
+ "value": org.get("name", dismiss),
+ "type": "entity_link",
+ "link": "/%s/%d" % (Organization._handleref.tag, org.get("id")),
+ },
+ {
+ "name": "website",
+ "type": "url",
+ "value": data.get("website", dismiss),
+ "label": _("Website"),
+ },
+ {
+ "name": "address1",
+ "label": _("Address 1"),
+ "value": data.get("address1", dismiss),
+ },
+ {
+ "name": "address2",
+ "label": _("Address 2"),
+ "value": data.get("address2", dismiss),
+ },
+ {
+ "name": "location",
+ "label": _("Location"),
+ "type": "location",
+ "value": data,
+ },
+ {
+ "name": "country",
+ "type": "list",
+ "data": "countries_b",
+ "label": _("Country Code"),
+ "value": data.get("country", dismiss),
+ },
+ {
+ "name": "geocode",
+ "label": _("Geocode"),
+ "type": "geocode",
+ "value": data,
+ },
+ {
+ "name": "clli",
+ "label": _("CLLI Code"),
+ "value": data.get("clli", dismiss),
+ },
+ {
+ "name": "npanxx",
+ "label": _("NPA-NXX"),
+ "value": data.get("npanxx", dismiss),
+ },
+ {
+ "name": "notes",
+ "label": _("Notes"),
+ "help_text": _("Markdown enabled"),
+ "type": "fmt-text",
+ "value": data.get("notes", dismiss),
+ },
+ ],
}
- return view_component(request, "facility", data, "Facility", perms=perms,
- instance=facility)
+ return view_component(
+ request, "facility", data, "Facility", perms=perms, instance=facility
+ )
@ensure_csrf_cookie
@@ -1011,27 +1094,31 @@ def view_exchange(request, id):
"""
try:
- exchange = InternetExchange.objects.get(id=id,
- status__in=["ok", "pending"])
+ exchange = InternetExchange.objects.get(id=id, status__in=["ok", "pending"])
except ObjectDoesNotExist:
return view_http_error_404(request)
- data = InternetExchangeSerializer(exchange, context={
- "user": request.user
- }).data
+ data = InternetExchangeSerializer(exchange, context={"user": request.user}).data
# find out if user can write to object
perms = export_permissions(request.user, exchange)
if not data:
return view_http_error_403(request)
- networks = NetworkIXLan.handleref.undeleted().select_related(
- 'network',
- 'ixlan').order_by('network__name').filter(ixlan__ix=exchange)
+ networks = (
+ NetworkIXLan.handleref.undeleted()
+ .select_related("network", "ixlan")
+ .order_by("network__name")
+ .filter(ixlan__ix=exchange)
+ )
dismiss = DoNotRender()
- facilities = InternetExchangeFacility.handleref.undeleted().select_related(
- 'ix', 'facility').filter(ix=exchange).order_by("facility__name")
+ facilities = (
+ InternetExchangeFacility.handleref.undeleted()
+ .select_related("ix", "facility")
+ .filter(ix=exchange)
+ .order_by("facility__name")
+ )
org = data.get("org")
@@ -1041,94 +1128,107 @@ def view_exchange(request, id):
"facilities": facilities,
"networks": networks,
"ixlans": exchange.ixlan_set_active_or_pending,
- "fields": [{
- "name": "org",
- "label": _("Organization"),
- "value": org.get("name", dismiss),
- "type": "entity_link",
- "link": "/%s/%d" % (Organization._handleref.tag, org.get("id"))
- }, {
- "name": "name_long",
- "label": _("Long Name"),
- "value": data.get("name_long", dismiss)
- }, {
- "name": "city",
- "label": _("City"),
- "value": data.get("city", dismiss)
- }, {
- "name": "country",
- "type": "list",
- "data": "countries_b",
- "label": _("Country"),
- "value": data.get("country", dismiss)
- }, {
- "name": "region_continent",
- "type": "list",
- "data": "enum/regions",
- "label": _("Continental Region"),
- "value": data.get("region_continent", dismiss)
- }, {
- "name": "media",
- "type": "list",
- "data": "enum/media",
- "label": _("Media Type"),
- "value": data.get("media", dismiss)
- }, {
- "type": "flags",
- "label": _("Protocols Supported"),
- "value": [{
- "name": "proto_unicast",
- "label": _("Unicast IPv4"),
- "value": int(data.get("proto_unicast", False))
- }, {
- "name": "proto_multicast",
- "label": _("Multicast"),
- "value": int(data.get("proto_multicast", False))
- }, {
- "name": "proto_ipv6",
- "label": _("IPv6"),
- "value": int(data.get("proto_ipv6", False))
- }]
- }, {
- "name": "notes",
- "label": _("Notes"),
- "help_text": _("Markdown enabled"),
- "type": "fmt-text",
- "value": data.get("notes", dismiss)
- }, {
- "type": "sub",
- "label": _("Contact Information")
- }, {
- "type": "url",
- "name": "website",
- "label": _("Company Website"),
- "value": data.get("website", dismiss)
- }, {
- "type": "url",
- "name": "url_stats",
- "label": _("Traffic Stats Website"),
- "value": data.get("url_stats", dismiss)
- }, {
- "type": "email",
- "name": "tech_email",
- "label": _("Technical Email"),
- "value": data.get("tech_email", dismiss)
- }, {
- "type": "string",
- "name": "tech_phone",
- "label": _("Technical Phone"),
- "value": data.get("tech_phone", dismiss)
- }, {
- "type": "email",
- "name": "policy_email",
- "label": _("Policy Email"),
- "value": data.get("policy_email", dismiss)
- }, {
- "type": "string",
- "name": "policy_phone",
- "label": _("Policy Phone"),
- "value": data.get("policy_phone", dismiss)
- }]
+ "fields": [
+ {
+ "name": "org",
+ "label": _("Organization"),
+ "value": org.get("name", dismiss),
+ "type": "entity_link",
+ "link": "/%s/%d" % (Organization._handleref.tag, org.get("id")),
+ },
+ {
+ "name": "name_long",
+ "label": _("Long Name"),
+ "value": data.get("name_long", dismiss),
+ },
+ {"name": "city", "label": _("City"), "value": data.get("city", dismiss)},
+ {
+ "name": "country",
+ "type": "list",
+ "data": "countries_b",
+ "label": _("Country"),
+ "value": data.get("country", dismiss),
+ },
+ {
+ "name": "region_continent",
+ "type": "list",
+ "data": "enum/regions",
+ "label": _("Continental Region"),
+ "value": data.get("region_continent", dismiss),
+ },
+ {
+ "name": "media",
+ "type": "list",
+ "data": "enum/media",
+ "label": _("Media Type"),
+ "value": data.get("media", dismiss),
+ },
+ {
+ "type": "flags",
+ "label": _("Protocols Supported"),
+ "value": [
+ {
+ "name": "proto_unicast",
+ "label": _("Unicast IPv4"),
+ "value": int(data.get("proto_unicast", False)),
+ },
+ {
+ "name": "proto_multicast",
+ "label": _("Multicast"),
+ "value": int(data.get("proto_multicast", False)),
+ },
+ {
+ "name": "proto_ipv6",
+ "label": _("IPv6"),
+ "value": int(data.get("proto_ipv6", False)),
+ },
+ ],
+ },
+ {
+ "name": "notes",
+ "label": _("Notes"),
+ "help_text": _("Markdown enabled"),
+ "type": "fmt-text",
+ "value": data.get("notes", dismiss),
+ },
+ {"type": "sub", "label": _("Contact Information")},
+ {
+ "type": "url",
+ "name": "website",
+ "label": _("Company Website"),
+ "value": data.get("website", dismiss),
+ },
+ {
+ "type": "url",
+ "name": "url_stats",
+ "label": _("Traffic Stats Website"),
+ "value": data.get("url_stats", dismiss),
+ },
+ {
+ "type": "email",
+ "name": "tech_email",
+ "label": _("Technical Email"),
+ "value": data.get("tech_email", dismiss),
+ },
+ {
+ "type": "string",
+ "name": "tech_phone",
+ "label": _("Technical Phone"),
+ "value": data.get("tech_phone", dismiss),
+ },
+ {
+ "type": "email",
+ "name": "policy_email",
+ "label": _("Policy Email"),
+ "value": data.get("policy_email", dismiss),
+ },
+ {
+ "type": "string",
+ "name": "policy_phone",
+ "label": _("Policy Phone"),
+ "value": data.get("policy_phone", dismiss),
+ },
+ ],
}
ixlan_num = data["ixlans"].count()
@@ -1144,30 +1244,39 @@ def view_exchange(request, id):
ixlan = data["ixlans"].first()
- data["fields"].extend([{
- "type": "sub",
- "label": _("LAN")
- }, {
- "type": "number",
- "name": "mtu",
- "label": _("MTU"),
- "value": ixlan.mtu or ""
- }, {
- "type": "bool",
- "name": "dot1q_support",
- "label": _("DOT1Q"),
- "value": ixlan.dot1q_support
- }])
-
- data["fields"].extend([{
- "type": "string",
- "name": "prefix_%d" % prefix.id,
- "label": _(prefix.protocol),
- "value": prefix.prefix
- } for prefix in ixlan.ixpfx_set_active])
+ data["fields"].extend(
+ [
+ {"type": "sub", "label": _("LAN")},
+ {
+ "type": "number",
+ "name": "mtu",
+ "label": _("MTU"),
+ "value": ixlan.mtu or "",
+ },
+ {
+ "type": "bool",
+ "name": "dot1q_support",
+ "label": _("DOT1Q"),
+ "value": ixlan.dot1q_support,
+ },
+ ]
+ )
+
+ data["fields"].extend(
+ [
+ {
+ "type": "string",
+ "name": "prefix_%d" % prefix.id,
+ "label": _(prefix.protocol),
+ "value": prefix.prefix,
+ }
+ for prefix in ixlan.ixpfx_set_active
+ ]
+ )
- return view_component(request, "exchange", data, "Exchange", perms=perms,
- instance=exchange)
+ return view_component(
+ request, "exchange", data, "Exchange", perms=perms, instance=exchange
+ )
@ensure_csrf_cookie
@@ -1200,8 +1309,8 @@ def view_network(request, id):
try:
network = NetworkSerializer.prefetch_related(
- Network.objects, request, depth=2).get(
- id=id, status__in=["ok", "pending"])
+ Network.objects, request, depth=2
+ ).get(id=id, status__in=["ok", "pending"])
except ObjectDoesNotExist:
return view_http_error_404(request)
@@ -1212,12 +1321,19 @@ def view_network(request, id):
perms = export_permissions(request.user, network)
- facilities = NetworkFacility.handleref.undeleted().select_related(
- "facility").filter(network=network).order_by("facility__name")
+ facilities = (
+ NetworkFacility.handleref.undeleted()
+ .select_related("facility")
+ .filter(network=network)
+ .order_by("facility__name")
+ )
- exchanges = NetworkIXLan.handleref.undeleted().select_related(
- "ixlan", "ixlan__ix",
- "network").filter(network=network).order_by("ixlan__ix__name")
+ exchanges = (
+ NetworkIXLan.handleref.undeleted()
+ .select_related("ixlan", "ixlan__ix", "network")
+ .filter(network=network)
+ .order_by("ixlan__ix__name")
+ )
# This will be passed as default value for keys that dont exist - causing
# them not to be rendered in the template - also it is fairly
@@ -1231,177 +1347,197 @@ def view_network(request, id):
"title": network_d.get("name", dismiss),
"facilities": facilities,
"exchanges": exchanges,
- "fields": [{
- "name": "org",
- "label": _("Organization"),
- "value": org.get("name", dismiss),
- "type": "entity_link",
- "link": "/%s/%d" % (Organization._handleref.tag, org.get("id"))
- }, {
- "name": "aka",
- "label": _("Also Known As"),
- "notify_incomplete": True,
- "value": network_d.get("aka", dismiss)
- }, {
- "name": "website",
- "label": _("Company Website"),
- "type": "url",
- "notify_incomplete": True,
- "value": network_d.get("website", dismiss)
- }, {
- "name": "asn",
- "label": _("Primary ASN"),
- "notify_incomplete": True,
- "value": network_d.get("asn", dismiss)
- }, {
- "name": "irr_as_set",
- "label": _("IRR as-set/route-set"),
- "notify_incomplete": True,
- "value": network_d.get("irr_as_set", dismiss)
- }, {
- "name": "route_server",
- "type": "url",
- "label": _("Route Server URL"),
- "notify_incomplete": True,
- "value": network_d.get("route_server", dismiss)
- }, {
- "name": "looking_glass",
- "type": "url",
- "label": _("Looking Glass URL"),
- "notify_incomplete": True,
- "value": network_d.get("looking_glass", dismiss)
- }, {
- "name": "info_type",
- "type": "list",
- "data": "enum/net_types",
- "blank": _("Not Disclosed"),
- "label": _("Network Type"),
- "notify_incomplete": True,
- "value": network_d.get("info_type", dismiss)
- }, {
- "name": "info_prefixes4",
- "label": _("IPv4 Prefixes"),
- "type": "number",
- "notify_incomplete": True,
- "value": int(network_d.get("info_prefixes4") or 0)
- }, {
- "name": "info_prefixes6",
- "label": _("IPv6 Prefixes"),
- "type": "number",
- "notify_incomplete": True,
- "value": int(network_d.get("info_prefixes6") or 0)
- }, {
- "name": "info_traffic",
- "type": "list",
- "data": "enum/traffic",
- "blank": _("Not Disclosed"),
- "label": _("Traffic Levels"),
- "value": network_d.get("info_traffic", dismiss)
- }, {
- "name": "info_ratio",
- "type": "list",
- "data": "enum/ratios",
- "label": _("Traffic Ratios"),
- "blank": _("Not Disclosed"),
- "value": network_d.get("info_ratio", dismiss)
- }, {
- "name": "info_scope",
- "type": "list",
- "data": "enum/scopes",
- "blank": _("Not Disclosed"),
- "label": _("Geographic Scope"),
- "value": network_d.get("info_scope", dismiss)
- }, {
- "type": "flags",
- "label": _("Protocols Supported"),
- "value": [{
- "name": "info_unicast",
- "label": _("Unicast IPv4"),
- "value": network_d.get("info_unicast", False)
- }, {
- "name": "info_multicast",
- "label": _("Multicast"),
- "value": network_d.get("info_multicast", False)
- }, {
- "name": "info_ipv6",
- "label": _("IPv6"),
- "value": network_d.get("info_ipv6", False)
- }]
- }, {
- "readonly": True,
- "name": "updated",
- "label": _("Last Updated"),
- "value": network_d.get("updated", dismiss)
- }, {
- "name": "notes",
- "label": _("Notes"),
- "help_text": _("Markdown enabled"),
- "type": "fmt-text",
- "value": network_d.get("notes", dismiss)
- }, {
- "type": "sub",
- "admin": True,
- "label": _("PeeringDB Configuration")
- }, {
- "type": "flags",
- "admin": True,
- "label": _("Allow IXP Update"),
- "help_text": _(
- "If enabled, an ixp may manage this network's entry in their peering list"
- ),
- "value": [{
- "name": "allow_ixp_update",
- "label": "",
- "value": network.allow_ixp_update
- }]
- }, {
- "type": "action",
- "admin": True,
- "label": _("IXP Update Tools"),
- "actions": [{
- "label": _("Preview"),
- "action": "ixf_preview",
+ "fields": [
+ {
+ "name": "org",
+ "label": _("Organization"),
+ "value": org.get("name", dismiss),
+ "type": "entity_link",
+ "link": "/%s/%d" % (Organization._handleref.tag, org.get("id")),
+ },
+ {
+ "name": "aka",
+ "label": _("Also Known As"),
+ "notify_incomplete": True,
+ "value": network_d.get("aka", dismiss),
+ },
+ {
+ "name": "website",
+ "label": _("Company Website"),
+ "type": "url",
+ "notify_incomplete": True,
+ "value": network_d.get("website", dismiss),
+ },
+ {
+ "name": "asn",
+ "label": _("Primary ASN"),
+ "notify_incomplete": True,
+ "value": network_d.get("asn", dismiss),
+ },
+ {
+ "name": "irr_as_set",
+ "label": _("IRR as-set/route-set"),
+ "notify_incomplete": True,
+ "value": network_d.get("irr_as_set", dismiss),
+ },
+ {
+ "name": "route_server",
+ "type": "url",
+ "label": _("Route Server URL"),
+ "notify_incomplete": True,
+ "value": network_d.get("route_server", dismiss),
+ },
+ {
+ "name": "looking_glass",
+ "type": "url",
+ "label": _("Looking Glass URL"),
+ "notify_incomplete": True,
+ "value": network_d.get("looking_glass", dismiss),
},
{
- "label": _("Postmortem"),
- "action": "ixf_postmortem"
- }]
- }, {
- "type": "sub",
- "label": _("Peering Policy Information")
- }, {
- "name": "policy_url",
- "label": _("Peering Policy"),
- "value": network_d.get("policy_url", dismiss),
- "notify_incomplete": True,
- "type": "url"
- }, {
- "name": "policy_general",
- "type": "list",
- "data": "enum/policy_general",
- "label": _("General Policy"),
- "value": network_d.get("policy_general", dismiss)
- }, {
- "name": "policy_locations",
- "type": "list",
- "data": "enum/policy_locations",
- "label": _("Multiple Locations"),
- "value": network_d.get("policy_locations", dismiss)
- }, {
- "name": "policy_ratio",
- "type": "list",
- "data": "enum/bool_choice_str",
- "label": _("Ratio Requirement"),
- "value": network_d.get("policy_ratio", dismiss),
- "value_label": dict(BOOL_CHOICE).get(
- network_d.get("policy_ratio"))
- }, {
- "name": "policy_contracts",
- "type": "list",
- "data": "enum/policy_contracts",
- "label": _("Contract Requirement"),
- "value": network_d.get("policy_contracts", dismiss)
- }]
+ "name": "info_type",
+ "type": "list",
+ "data": "enum/net_types",
+ "blank": _("Not Disclosed"),
+ "label": _("Network Type"),
+ "notify_incomplete": True,
+ "value": network_d.get("info_type", dismiss),
+ },
+ {
+ "name": "info_prefixes4",
+ "label": _("IPv4 Prefixes"),
+ "type": "number",
+ "notify_incomplete": True,
+ "value": int(network_d.get("info_prefixes4") or 0),
+ },
+ {
+ "name": "info_prefixes6",
+ "label": _("IPv6 Prefixes"),
+ "type": "number",
+ "notify_incomplete": True,
+ "value": int(network_d.get("info_prefixes6") or 0),
+ },
+ {
+ "name": "info_traffic",
+ "type": "list",
+ "data": "enum/traffic",
+ "blank": _("Not Disclosed"),
+ "label": _("Traffic Levels"),
+ "value": network_d.get("info_traffic", dismiss),
+ },
+ {
+ "name": "info_ratio",
+ "type": "list",
+ "data": "enum/ratios",
+ "label": _("Traffic Ratios"),
+ "blank": _("Not Disclosed"),
+ "value": network_d.get("info_ratio", dismiss),
+ },
+ {
+ "name": "info_scope",
+ "type": "list",
+ "data": "enum/scopes",
+ "blank": _("Not Disclosed"),
+ "label": _("Geographic Scope"),
+ "value": network_d.get("info_scope", dismiss),
+ },
+ {
+ "type": "flags",
+ "label": _("Protocols Supported"),
+ "value": [
+ {
+ "name": "info_unicast",
+ "label": _("Unicast IPv4"),
+ "value": network_d.get("info_unicast", False),
+ },
+ {
+ "name": "info_multicast",
+ "label": _("Multicast"),
+ "value": network_d.get("info_multicast", False),
+ },
+ {
+ "name": "info_ipv6",
+ "label": _("IPv6"),
+ "value": network_d.get("info_ipv6", False),
+ },
+ ],
+ },
+ {
+ "readonly": True,
+ "name": "updated",
+ "label": _("Last Updated"),
+ "value": network_d.get("updated", dismiss),
+ },
+ {
+ "name": "notes",
+ "label": _("Notes"),
+ "help_text": _("Markdown enabled"),
+ "type": "fmt-text",
+ "value": network_d.get("notes", dismiss),
+ },
+ {"type": "sub", "admin": True, "label": _("PeeringDB Configuration")},
+ {
+ "type": "flags",
+ "admin": True,
+ "label": _("Allow IXP Update"),
+ "help_text": _(
+ "If enabled, an ixp may manage this network's entry in their peering list"
+ ),
+ "value": [
+ {
+ "name": "allow_ixp_update",
+ "label": "",
+ "value": network.allow_ixp_update,
+ }
+ ],
+ },
+ {
+ "type": "action",
+ "admin": True,
+ "label": _("IXP Update Tools"),
+ "actions": [
+ {"label": _("Preview"), "action": "ixf_preview",},
+ {"label": _("Postmortem"), "action": "ixf_postmortem"},
+ ],
+ },
+ {"type": "sub", "label": _("Peering Policy Information")},
+ {
+ "name": "policy_url",
+ "label": _("Peering Policy"),
+ "value": network_d.get("policy_url", dismiss),
+ "notify_incomplete": True,
+ "type": "url",
+ },
+ {
+ "name": "policy_general",
+ "type": "list",
+ "data": "enum/policy_general",
+ "label": _("General Policy"),
+ "value": network_d.get("policy_general", dismiss),
+ },
+ {
+ "name": "policy_locations",
+ "type": "list",
+ "data": "enum/policy_locations",
+ "label": _("Multiple Locations"),
+ "value": network_d.get("policy_locations", dismiss),
+ },
+ {
+ "name": "policy_ratio",
+ "type": "list",
+ "data": "enum/bool_choice_str",
+ "label": _("Ratio Requirement"),
+ "value": network_d.get("policy_ratio", dismiss),
+ "value_label": dict(BOOL_CHOICE).get(network_d.get("policy_ratio")),
+ },
+ {
+ "name": "policy_contracts",
+ "type": "list",
+ "data": "enum/policy_contracts",
+ "label": _("Contract Requirement"),
+ "value": network_d.get("policy_contracts", dismiss),
+ },
+ ],
}
# Add POC data to dataset
@@ -1409,12 +1545,13 @@ def view_network(request, id):
if not request.user.is_authenticated() or not request.user.is_verified:
cnt = network.poc_set.filter(status="ok", visible="Users").count()
- data["poc_hidden"] = (cnt > 0)
+ data["poc_hidden"] = cnt > 0
else:
data["poc_hidden"] = False
- return view_component(request, "network", data, "Network", perms=perms,
- instance=network)
+ return view_component(
+ request, "network", data, "Network", perms=perms, instance=network
+ )
def view_suggest(request, reftag):
@@ -1446,6 +1583,7 @@ def view_aup(request):
return view_simple_content(request, "site/aup.html")
+
def view_about(request):
"""
Render page containing about
@@ -1468,7 +1606,7 @@ def view_sponsorships(request):
"diamond": qset.filter(level=4),
"platinum": qset.filter(level=3),
"gold": qset.filter(level=2),
- "silver": qset.filter(level=1)
+ "silver": qset.filter(level=1),
}
env = make_env(sponsorships=sponsorships)
@@ -1490,7 +1628,8 @@ def view_partnerships(request):
partnerships[row.level].append(row)
env = make_env(
- partnership_levels=dict(PARTNERSHIP_LEVELS), partnerships=partnerships)
+ partnership_levels=dict(PARTNERSHIP_LEVELS), partnerships=partnerships
+ )
return HttpResponse(template.render(env, request))
@@ -1506,26 +1645,26 @@ def view_advanced_search(request):
if reftag == "net":
try:
- env["ix_name"] = InternetExchange.objects.get(
- id=request.GET.get("ix")).name
+ env["ix_name"] = InternetExchange.objects.get(id=request.GET.get("ix")).name
except (ObjectDoesNotExist, ValueError):
env["ix_name"] = ""
try:
env["not_ix_name"] = InternetExchange.objects.get(
- id=request.GET.get("not_ix")).name
+ id=request.GET.get("not_ix")
+ ).name
except (ObjectDoesNotExist, ValueError):
env["not_ix_name"] = ""
try:
- env["fac_name"] = Facility.objects.get(
- id=request.GET.get("fac")).name
+ env["fac_name"] = Facility.objects.get(id=request.GET.get("fac")).name
except (ObjectDoesNotExist, ValueError):
env["fac_name"] = ""
try:
env["not_fac_name"] = Facility.objects.get(
- id=request.GET.get("not_fac")).name
+ id=request.GET.get("not_fac")
+ ).name
except (ObjectDoesNotExist, ValueError):
env["not_fac_name"] = ""
@@ -1533,21 +1672,21 @@ def view_advanced_search(request):
def request_api_search(request):
- q = request.GET.get('q')
+ q = request.GET.get("q")
if not q:
return HttpResponseBadRequest()
result = search(q)
- return HttpResponse(json.dumps(result), content_type='application/json')
+ return HttpResponse(json.dumps(result), content_type="application/json")
def request_search(request):
"""
XHR search request goes here
"""
- q = request.GET.get('q')
+ q = request.GET.get("q")
if not q:
return HttpResponseRedirect("/")
@@ -1558,26 +1697,32 @@ def request_search(request):
if m:
net = Network.objects.filter(asn=m.group(2), status="ok")
if net.exists() and net.count() == 1:
- return HttpResponseRedirect('/net/{}'.format(net.first().id))
+ return HttpResponseRedirect("/net/{}".format(net.first().id))
result = search(q)
- sponsors = dict([(org.id, sponsorship.label.lower()) for org, sponsorship in Sponsorship.active_by_org()])
+ sponsors = dict(
+ [
+ (org.id, sponsorship.label.lower())
+ for org, sponsorship in Sponsorship.active_by_org()
+ ]
+ )
for tag, rows in result.items():
for item in rows:
item["sponsorship"] = sponsors.get(item["org_id"])
- template = loader.get_template('site/search_result.html')
+ template = loader.get_template("site/search_result.html")
env = make_env(
**{
- 'search_ixp': result.get(InternetExchange._handleref.tag),
- 'search_net': result.get(Network._handleref.tag),
- 'search_fac': result.get(Facility._handleref.tag),
- 'count_ixp': len(result.get(InternetExchange._handleref.tag, [])),
- 'count_net': len(result.get(Network._handleref.tag, [])),
- 'count_fac': len(result.get(Facility._handleref.tag, []))
- })
+ "search_ixp": result.get(InternetExchange._handleref.tag),
+ "search_net": result.get(Network._handleref.tag),
+ "search_fac": result.get(Facility._handleref.tag),
+ "count_ixp": len(result.get(InternetExchange._handleref.tag, [])),
+ "count_net": len(result.get(Network._handleref.tag, [])),
+ "count_fac": len(result.get(Facility._handleref.tag, [])),
+ }
+ )
return HttpResponse(template.render(env, request))
@@ -1599,12 +1744,12 @@ def request_login(request):
was_limited = getattr(request, "limited", False)
if was_limited:
- return view_login(request, errors=[
- _("Please wait a bit before trying to login again.")
- ])
+ return view_login(
+ request, errors=[_("Please wait a bit before trying to login again.")]
+ )
- username = request.POST['username']
- password = request.POST['password']
+ username = request.POST["username"]
+ password = request.POST["password"]
redir = request.POST.get("next") or "/"
if redir == "/logout":
redir = "/"
@@ -1621,6 +1766,7 @@ def request_login(request):
login(request, user)
from django.utils import translation
+
user_language = user.get_locale()
translation.activate(user_language)
request.session[translation.LANGUAGE_SESSION_KEY] = user_language
@@ -1635,37 +1781,34 @@ def request_login(request):
def request_translation(request, data_type):
if not request.user.is_authenticated():
- return JsonResponse({
- "status": 'error',
- "error": "Please login to use translation service"
- })
+ return JsonResponse(
+ {"status": "error", "error": "Please login to use translation service"}
+ )
user_language = request.user.get_locale()
if not user_language:
- user_language = 'en'
+ user_language = "en"
note = request.POST.get("note")
target = user_language
if note and target:
- translationURL = 'https://translation.googleapis.com/language/translate/v2'
+ translationURL = "https://translation.googleapis.com/language/translate/v2"
call_params = {
- 'key': dj_settings.GOOGLE_GEOLOC_API_KEY,
- 'q': note,
- 'target': target
+ "key": dj_settings.GOOGLE_GEOLOC_API_KEY,
+ "q": note,
+ "target": target,
}
reply = requests.post(translationURL, params=call_params).json()
if not "data" in reply:
return JsonResponse({"status": request.POST, "error": reply})
- return JsonResponse({
- "status": request.POST,
- "translation": reply["data"]["translations"][0]
- })
+ return JsonResponse(
+ {"status": request.POST, "translation": reply["data"]["translations"][0]}
+ )
- return JsonResponse({
- "status": 'error',
- "error": "No text or no language specified"
- })
+ return JsonResponse(
+ {"status": "error", "error": "No text or no language specified"}
+ )
diff --git a/tests/conftest.py b/tests/conftest.py
index 389c944f..5e125bc3 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -11,7 +11,7 @@
def pytest_generate_tests(metafunc):
for fixture in metafunc.fixturenames:
- if fixture.startswith('data_'):
+ if fixture.startswith("data_"):
data = pytest_filedata.get_data(fixture)
metafunc.parametrize(fixture, list(data.values()), ids=list(data.keys()))
diff --git a/tests/django_init.py b/tests/django_init.py
index 7952dc61..288b226f 100644
--- a/tests/django_init.py
+++ b/tests/django_init.py
@@ -3,92 +3,86 @@
# lazy init for translations
_ = lambda s: s
-#from django.utils.translation import ugettext_lazy as _
+# from django.utils.translation import ugettext_lazy as _
settings.configure(
PACKAGE_VERSION="dev",
RELEASE_ENV="dev",
- MIGRATION_MODULES={"django_peeringdb":None},
+ MIGRATION_MODULES={"django_peeringdb": None},
INSTALLED_APPS=[
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'django.contrib.admin',
- 'django.contrib.sessions',
- 'django.contrib.sites',
- 'django_inet',
- 'django_peeringdb',
- 'django_namespace_perms',
- 'django_countries',
- 'oauth2_provider',
- 'peeringdb_server',
- 'allauth',
- 'allauth.account',
- 'reversion',
- 'rest_framework',
- 'dal',
- 'dal_select2',
- 'corsheaders',
- 'captcha',
+ "django.contrib.auth",
+ "django.contrib.contenttypes",
+ "django.contrib.admin",
+ "django.contrib.sessions",
+ "django.contrib.sites",
+ "django_inet",
+ "django_peeringdb",
+ "django_namespace_perms",
+ "django_countries",
+ "oauth2_provider",
+ "peeringdb_server",
+ "allauth",
+ "allauth.account",
+ "reversion",
+ "rest_framework",
+ "dal",
+ "dal_select2",
+ "corsheaders",
+ "captcha",
],
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.db.DatabaseCache",
- "LOCATION": "django_cache"
+ "LOCATION": "django_cache",
}
},
- TEMPLATES=[{
- "BACKEND": 'django.template.backends.django.DjangoTemplates',
- "APP_DIRS": True,
- "OPTIONS": {
- "context_processors": [
- "django.contrib.auth.context_processors.auth",
- "django.template.context_processors.debug",
- "django.template.context_processors.request",
- "django.template.context_processors.i18n",
- "django.template.context_processors.media",
- "django.template.context_processors.static",
- "django.template.context_processors.tz",
- "django.contrib.messages.context_processors.messages",
- ],
- #"loaders" : TEMPLATE_LOADERS
+ TEMPLATES=[
+ {
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "APP_DIRS": True,
+ "OPTIONS": {
+ "context_processors": [
+ "django.contrib.auth.context_processors.auth",
+ "django.template.context_processors.debug",
+ "django.template.context_processors.request",
+ "django.template.context_processors.i18n",
+ "django.template.context_processors.media",
+ "django.template.context_processors.static",
+ "django.template.context_processors.tz",
+ "django.contrib.messages.context_processors.messages",
+ ],
+ # "loaders" : TEMPLATE_LOADERS
+ },
}
- }],
- LANGUAGE_CODE='en-us',
- LANGUAGES=[
- ('en', _('English')),
- ('pt', _('Portuguese')),
],
+ LANGUAGE_CODE="en-us",
+ LANGUAGES=[("en", _("English")), ("pt", _("Portuguese")),],
USE_L10N=True,
USE_I18N=True,
MIDDLEWARE_CLASSES=(
- 'corsheaders.middleware.CorsMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.locale.LocaleMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'peeringdb_server.maintenance.Middleware',
+ "corsheaders.middleware.CorsMiddleware",
+ "django.contrib.sessions.middleware.SessionMiddleware",
+ "django.middleware.locale.LocaleMiddleware",
+ "django.middleware.common.CommonMiddleware",
+ "django.middleware.csrf.CsrfViewMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware",
+ "django.contrib.messages.middleware.MessageMiddleware",
+ "peeringdb_server.maintenance.Middleware",
),
SOUTH_TESTS_MIGRATE=False,
SOUTH_SKIP_TESTS=True,
- AUTH_USER_MODEL='peeringdb_server.User',
- TABLE_PREFIX='peeringdb_',
+ AUTH_USER_MODEL="peeringdb_server.User",
+ TABLE_PREFIX="peeringdb_",
PEERINGDB_ABSTRACT_ONLY=True,
- COUNTRIES_OVERRIDE={'XK': _('Kosovo')},
+ COUNTRIES_OVERRIDE={"XK": _("Kosovo")},
CLIENT_COMPAT={
- "client":{"min": (0,6), "max":(0,6,5)},
- "backends":{
- "django_peeringdb":{"min":(0,6), "max":(0,6,5)}
- }
+ "client": {"min": (0, 6), "max": (0, 6, 5)},
+ "backends": {"django_peeringdb": {"min": (0, 6), "max": (0, 6, 5)}},
},
- DATABASE_ENGINE='django.db.backends.sqlite3',
+ DATABASE_ENGINE="django.db.backends.sqlite3",
DATABASES={
- 'default': {
- 'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': ':memory:',
- },
- #XXX - this is supposed to work to mimic replication
+ "default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:",},
+ # XXX - this is supposed to work to mimic replication
# during tests, but doesnt. So instead we use the
# peeringdb_server.db_router.TestRouter class instead
# which just always used the default db for read and writes
@@ -96,10 +90,10 @@
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': ':memory:',
# 'TEST' : { 'MIRROR' : 'default' }
- #}
+ # }
},
- #XXX - change to peeringdb_server.db_router.DatabaseRouter
- #if repliation mimicing (see above) gets fixed
+ # XXX - change to peeringdb_server.db_router.DatabaseRouter
+ # if repliation mimicing (see above) gets fixed
DATABASE_ROUTERS=["peeringdb_server.db_router.TestRouter"],
DEBUG=False,
GUEST_GROUP_ID=1,
@@ -112,40 +106,30 @@
SUGGEST_ENTITY_ORG=1234,
API_URL="localhost",
REST_FRAMEWORK={
- 'DEFAULT_AUTHENTICATION_CLASSES': (
- 'rest_framework.authentication.BasicAuthentication',
- 'rest_framework.authentication.SessionAuthentication'),
- 'DEFAULT_MODEL_SERIALIZER_CLASS': 'rest_framework.serializers.HyperlinkedModelSerializer',
- 'DEFAULT_PERMISSION_CLASSES': [
- 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
- 'django_namespace_perms.rest.BasePermission',
+ "DEFAULT_AUTHENTICATION_CLASSES": (
+ "rest_framework.authentication.BasicAuthentication",
+ "rest_framework.authentication.SessionAuthentication",
+ ),
+ "DEFAULT_MODEL_SERIALIZER_CLASS": "rest_framework.serializers.HyperlinkedModelSerializer",
+ "DEFAULT_PERMISSION_CLASSES": [
+ "rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly",
+ "django_namespace_perms.rest.BasePermission",
],
- 'DEFAULT_RENDERER_CLASSES': (
- 'peeringdb_server.renderers.MetaJSONRenderer', )
+ "DEFAULT_RENDERER_CLASSES": ("peeringdb_server.renderers.MetaJSONRenderer",),
},
NSP_MODE="crud",
NSP_GUEST_GROUP="guest",
DEBUG_EMAIL=True,
TIME_ZONE="UTC",
USE_TZ=True,
- AUTHENTICATION_BACKENDS=(
- "django_namespace_perms.auth.backends.NSPBackend", ),
+ AUTHENTICATION_BACKENDS=("django_namespace_perms.auth.backends.NSPBackend",),
ROOT_URLCONF="peeringdb_com.urls",
LOGGING={
- 'version': 1,
- 'disable_existing_loggers': False,
- 'handlers': {
- 'stderr': {
- 'level': 'DEBUG',
- 'class': 'logging.StreamHandler',
- },
- },
- 'loggers': {
- '': {
- 'handlers': ['stderr'],
- 'level': 'DEBUG',
- 'propagate': False
- },
+ "version": 1,
+ "disable_existing_loggers": False,
+ "handlers": {"stderr": {"level": "DEBUG", "class": "logging.StreamHandler",},},
+ "loggers": {
+ "": {"handlers": ["stderr"], "level": "DEBUG", "propagate": False},
},
},
OAUTH_ENABLED=False,
@@ -156,10 +140,10 @@
CORS_ALLOW_CREDENTIALS=False,
DATA_QUALITY_MAX_PREFIX_V4_LIMIT=500000,
DATA_QUALITY_MAX_PREFIX_V6_LIMIT=500000,
- DATA_QUALITY_MIN_PREFIXLEN_V4 = 18,
- DATA_QUALITY_MAX_PREFIXLEN_V4 = 28,
- DATA_QUALITY_MIN_PREFIXLEN_V6 = 64,
- DATA_QUALITY_MAX_PREFIXLEN_V6 = 116,
+ DATA_QUALITY_MIN_PREFIXLEN_V4=18,
+ DATA_QUALITY_MAX_PREFIXLEN_V4=28,
+ DATA_QUALITY_MIN_PREFIXLEN_V6=64,
+ DATA_QUALITY_MAX_PREFIXLEN_V6=116,
TUTORIAL_MODE=False,
CAPTCHA_TEST_MODE=True,
SITE_ID=1,
@@ -176,5 +160,6 @@
"view_verify_POST": "2/m",
"request_translation": "10/m",
"view_import_ixlan_ixf_preview": "1/m",
- "view_import_net_ixf_postmortem": "1/m"
- })
+ "view_import_net_ixf_postmortem": "1/m",
+ },
+)
diff --git a/tests/test_admin.py b/tests/test_admin.py
index 6781bebf..796e0912 100644
--- a/tests/test_admin.py
+++ b/tests/test_admin.py
@@ -16,11 +16,7 @@ class AdminTests(TestCase):
@classmethod
def entity_data(cls, org, tag):
- kwargs = {
- "name": "%s %s" % (org.name, tag),
- "status": "ok",
- "org": org
- }
+ kwargs = {"name": "%s %s" % (org.name, tag), "status": "ok", "org": org}
if tag == "net":
cls.asn_count += 1
kwargs.update(asn=cls.asn_count)
@@ -35,23 +31,26 @@ def setUpTestData(cls):
cls.entities["org"] = [
org
for org in [
- models.Organization.objects.create(
- name="Org %d" % i, status="ok") for i in range(0, 9)
+ models.Organization.objects.create(name="Org %d" % i, status="ok")
+ for i in range(0, 9)
]
]
# set up a network,facility and ix under each org
for tag in ["ix", "net", "fac"]:
cls.entities[tag] = [
- models.REFTAG_MAP[tag].objects.create(**cls.entity_data(
- org, tag)) for org in cls.entities["org"]
+ models.REFTAG_MAP[tag].objects.create(**cls.entity_data(org, tag))
+ for org in cls.entities["org"]
]
# create a user under each org
cls.entities["user"] = [
models.User.objects.create_user(
- "user " + org.name, "%s@localhost" % org.name,
- first_name="First", last_name="Last")
+ "user " + org.name,
+ "%s@localhost" % org.name,
+ first_name="First",
+ last_name="Last",
+ )
for org in cls.entities["org"]
]
i = 0
@@ -60,34 +59,39 @@ def setUpTestData(cls):
i += 1
cls.admin_user = models.User.objects.create_user(
- "admin", "admin@localhost", first_name="admin", last_name="admin")
+ "admin", "admin@localhost", first_name="admin", last_name="admin"
+ )
cls.admin_user.is_superuser = True
cls.admin_user.is_staff = True
cls.admin_user.save()
cls.admin_user.set_password("admin")
cls.admin_user.save()
- #set up some ixlans
+ # set up some ixlans
cls.entities["ixlan"] = [
- models.IXLan.objects.create(ix=ix, status="ok")
- for ix in cls.entities["ix"]
+ models.IXLan.objects.create(ix=ix, status="ok") for ix in cls.entities["ix"]
]
- #set up a prefix
+ # set up a prefix
cls.entities["ixpfx"] = [
models.IXLanPrefix.objects.create(
ixlan=cls.entities["ixlan"][0],
protocol="IPv4",
prefix="207.41.110.0/24",
- status="ok")
+ status="ok",
+ )
]
- #set up some netixlans
+ # set up some netixlans
cls.entities["netixlan"] = [
models.NetworkIXLan.objects.create(
- network=cls.entities["net"][0], ixlan=cls.entities["ixlan"][0],
- ipaddr4=addr, status="ok", asn=cls.entities["net"][0].asn,
- speed=1000)
+ network=cls.entities["net"][0],
+ ixlan=cls.entities["ixlan"][0],
+ ipaddr4=addr,
+ status="ok",
+ asn=cls.entities["net"][0].asn,
+ speed=1000,
+ )
for addr in ["207.41.110.37", "207.41.110.38", "207.41.110.39"]
]
@@ -104,15 +108,17 @@ def test_views(self):
"""
m = [
- models.Facility, models.InternetExchange, models.Network,
- models.Organization, models.User
+ models.Facility,
+ models.InternetExchange,
+ models.Network,
+ models.Organization,
+ models.User,
]
c = Client()
c.login(username="admin", password="admin")
for model in m:
- url = "/cp/%s/%s/" % (model._meta.app_label,
- model._meta.model_name)
+ url = "/cp/%s/%s/" % (model._meta.app_label, model._meta.model_name)
response = c.get(url, follow=True)
self.assertEqual(response.status_code, 200)
@@ -229,8 +235,8 @@ def test_commandline_tool(self):
self.assertEqual(response.status_code, 200)
for i, n in models.COMMANDLINE_TOOLS:
self.assertGreater(
- response.content.find(''.format(
- i, n)), -1)
+ response.content.find(''.format(i, n)), -1
+ )
def test_commandline_tool_renumber_lans(self):
# test the form that runs the renumer ip space tool
@@ -246,22 +252,25 @@ def test_commandline_tool_renumber_lans(self):
self.assertGreater(
cont.find(
''
- ), -1)
+ ),
+ -1,
+ )
self.assertGreater(
cont.find(
''
- ), -1)
+ ),
+ -1,
+ )
self.assertGreater(
- cont.find(
- ''),
- -1)
+ cont.find(''), -1
+ )
# test post to renumber lans command form (preview)
data = {
"tool": "pdb_renumber_lans",
"exchange": self.entities["ix"][0].id,
"old_prefix": "207.41.110.0/24",
- "new_prefix": "207.41.111.0/24"
+ "new_prefix": "207.41.111.0/24",
}
url = "/cp/peeringdb_server/commandlinetool/preview/"
response = c.post(url, data, follow=True)
@@ -269,27 +278,35 @@ def test_commandline_tool_renumber_lans(self):
self.assertEqual(response.status_code, 200)
self.assertGreater(
cont.find(
- '[pretend] Renumbering ixpfx1 207.41.110.0/24 -> 207.41.111.0/24'
- ), -1)
+ "[pretend] Renumbering ixpfx1 207.41.110.0/24 -> 207.41.111.0/24"
+ ),
+ -1,
+ )
self.assertGreater(
cont.find(
- '[pretend] Renumbering netixlan1 AS1 207.41.110.37 -> 207.41.111.37'
- ), -1)
+ "[pretend] Renumbering netixlan1 AS1 207.41.110.37 -> 207.41.111.37"
+ ),
+ -1,
+ )
self.assertGreater(
cont.find(
- '[pretend] Renumbering netixlan2 AS1 207.41.110.38 -> 207.41.111.38'
- ), -1)
+ "[pretend] Renumbering netixlan2 AS1 207.41.110.38 -> 207.41.111.38"
+ ),
+ -1,
+ )
self.assertGreater(
cont.find(
- '[pretend] Renumbering netixlan3 AS1 207.41.110.39 -> 207.41.111.39'
- ), -1)
+ "[pretend] Renumbering netixlan3 AS1 207.41.110.39 -> 207.41.111.39"
+ ),
+ -1,
+ )
# test post to renumber lans command form
data = {
"tool": "pdb_renumber_lans",
"exchange": self.entities["ix"][0].id,
"old_prefix": "207.41.110.0/24",
- "new_prefix": "207.41.111.0/24"
+ "new_prefix": "207.41.111.0/24",
}
url = "/cp/peeringdb_server/commandlinetool/run/"
response = c.post(url, data, follow=True)
@@ -297,29 +314,21 @@ def test_commandline_tool_renumber_lans(self):
self.assertEqual(response.status_code, 200)
self.assertGreater(
- cont.find(
- '>Renumbering ixpfx1 207.41.110.0/24 -> 207.41.111.0/24'
- ), -1)
+ cont.find(">Renumbering ixpfx1 207.41.110.0/24 -> 207.41.111.0/24"), -1
+ )
self.assertGreater(
- cont.find(
- '>Renumbering netixlan1 AS1 207.41.110.37 -> 207.41.111.37'
- ), -1)
+ cont.find(">Renumbering netixlan1 AS1 207.41.110.37 -> 207.41.111.37"), -1
+ )
self.assertGreater(
- cont.find(
- '>Renumbering netixlan2 AS1 207.41.110.38 -> 207.41.111.38'
- ), -1)
+ cont.find(">Renumbering netixlan2 AS1 207.41.110.38 -> 207.41.111.38"), -1
+ )
self.assertGreater(
- cont.find(
- '>Renumbering netixlan3 AS1 207.41.110.39 -> 207.41.111.39'
- ), -1)
-
+ cont.find(">Renumbering netixlan3 AS1 207.41.110.39 -> 207.41.111.39"), -1
+ )
for netixlan in self.entities["netixlan"]:
netixlan.refresh_from_db()
- self.assertEqual(
- str(self.entities["netixlan"][0].ipaddr4), "207.41.111.37")
- self.assertEqual(
- str(self.entities["netixlan"][1].ipaddr4), "207.41.111.38")
- self.assertEqual(
- str(self.entities["netixlan"][2].ipaddr4), "207.41.111.39")
+ self.assertEqual(str(self.entities["netixlan"][0].ipaddr4), "207.41.111.37")
+ self.assertEqual(str(self.entities["netixlan"][1].ipaddr4), "207.41.111.38")
+ self.assertEqual(str(self.entities["netixlan"][2].ipaddr4), "207.41.111.39")
diff --git a/tests/test_api.py b/tests/test_api.py
index 0d379c65..198ef623 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -33,9 +33,9 @@ def setup_module(module):
ASN_RANGE_OVERRIDE = range(9000000, 9000999)
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "api",
- "rdap_override.json"), "r") as fh:
+ os.path.join(os.path.dirname(__file__), "data", "api", "rdap_override.json"),
+ "r",
+ ) as fh:
pdbinet.RdapLookup.override_result = json.load(fh)
def get_asn(self, asn):
@@ -94,13 +94,12 @@ def __init__(self, *args, **kwargs):
self.user_inst = models.User.objects.get(username="guest")
self.api_client.force_authenticate(self.user_inst)
- def _request(self, typ, id=0, method="GET", params=None, data=None,
- url=None):
+ def _request(self, typ, id=0, method="GET", params=None, data=None, url=None):
if not url:
if id:
url = "/api/%s/%s" % (typ, id)
else:
- url = "/api/%s" % (typ, )
+ url = "/api/%s" % (typ,)
fnc = getattr(self.api_client, method.lower())
if not data:
@@ -138,25 +137,29 @@ def setUpTestData(cls):
user_group = Group.objects.create(name="user")
guest_user = models.User.objects.create_user(
- "guest", "guest@localhost", "guest")
+ "guest", "guest@localhost", "guest"
+ )
guest_group.user_set.add(guest_user)
nsp.models.GroupPermission.objects.create(
- group=guest_group, namespace="peeringdb.organization",
- permissions=0x01)
+ group=guest_group, namespace="peeringdb.organization", permissions=0x01
+ )
nsp.models.GroupPermission.objects.create(
- group=user_group, namespace="peeringdb.organization",
- permissions=0x01)
+ group=user_group, namespace="peeringdb.organization", permissions=0x01
+ )
nsp.models.GroupPermission.objects.create(
- group=user_group, namespace="peeringdb.organization.{}".format(
- settings.SUGGEST_ENTITY_ORG), permissions=0x04)
+ group=user_group,
+ namespace="peeringdb.organization.{}".format(settings.SUGGEST_ENTITY_ORG),
+ permissions=0x04,
+ )
nsp.models.GroupPermission.objects.create(
group=user_group,
namespace="peeringdb.organization.*.network.*.poc_set.users",
- permissions=0x01)
+ permissions=0x01,
+ )
# prepare api test data
cls.prepare()
diff --git a/tests/test_api_cache.py b/tests/test_api_cache.py
index 9c24997d..4c3e969c 100644
--- a/tests/test_api_cache.py
+++ b/tests/test_api_cache.py
@@ -53,34 +53,38 @@ def setUpTestData(cls):
user_group = Group.objects.create(name="user")
guest_user = models.User.objects.create_user(
- "guest", "guest@localhost", "guest")
+ "guest", "guest@localhost", "guest"
+ )
guest_group.user_set.add(guest_user)
nsp.models.GroupPermission.objects.create(
- group=guest_group, namespace="peeringdb.organization",
- permissions=0x01)
+ group=guest_group, namespace="peeringdb.organization", permissions=0x01
+ )
nsp.models.GroupPermission.objects.create(
- group=user_group, namespace="peeringdb.organization",
- permissions=0x01)
+ group=user_group, namespace="peeringdb.organization", permissions=0x01
+ )
nsp.models.GroupPermission.objects.create(
- group=user_group, namespace="peeringdb.organization.{}".format(
- settings.SUGGEST_ENTITY_ORG), permissions=0x04)
+ group=user_group,
+ namespace="peeringdb.organization.{}".format(settings.SUGGEST_ENTITY_ORG),
+ permissions=0x04,
+ )
nsp.models.GroupPermission.objects.create(
group=user_group,
namespace="peeringdb.organization.*.network.*.poc_set.users",
- permissions=0x01)
+ permissions=0x01,
+ )
# prepare api test data
cls.prepare()
settings.API_CACHE_ROOT = tempfile.mkdtemp()
- settings.API_CACHE_LOG = os.path.join(settings.API_CACHE_ROOT,
- "log.log")
+ settings.API_CACHE_LOG = os.path.join(settings.API_CACHE_ROOT, "log.log")
super_user = models.User.objects.create_user(
- "admin", "admin@localhost", "admin")
+ "admin", "admin@localhost", "admin"
+ )
super_user.is_superuser = True
super_user.is_staff = True
super_user.save()
diff --git a/tests/test_api_compat.py b/tests/test_api_compat.py
index 8af7faae..deb81ac7 100644
--- a/tests/test_api_compat.py
+++ b/tests/test_api_compat.py
@@ -11,15 +11,16 @@ class TestAPIClientCompat(ClientCase):
@classmethod
def setUpTestData(cls):
super(TestAPIClientCompat, cls).setUpTestData()
- cls.superuser = User.objects.create_user("su", "su@localhost", "su",
- is_superuser=True)
- cls.org = REFTAG_MAP["org"].objects.create(name="Test Org",
- status="ok")
+ cls.superuser = User.objects.create_user(
+ "su", "su@localhost", "su", is_superuser=True
+ )
+ cls.org = REFTAG_MAP["org"].objects.create(name="Test Org", status="ok")
@property
def expected_compat_err_str(self):
return "Your client version is incompatible with server version of the api, please install peeringdb>={},<={} {}>={},<={}".format(
- "0.6", "0.6.5", "django_peeringdb", "0.6", "0.6.5")
+ "0.6", "0.6.5", "django_peeringdb", "0.6", "0.6.5"
+ )
def _compat(self, ua_c, ua_be, error):
if ua_c and ua_be:
@@ -37,11 +38,9 @@ def _compat(self, ua_c, ua_be, error):
else:
assert r.status_code == 200
- r = self.client.post("/api/net", {
- "org_id": 1,
- "name": "Test net",
- "asn": 9000000
- }, format="json")
+ r = self.client.post(
+ "/api/net", {"org_id": 1, "name": "Test net", "asn": 9000000}, format="json"
+ )
content = json.loads(r.content)
if error:
assert r.status_code == 400
@@ -52,8 +51,7 @@ def _compat(self, ua_c, ua_be, error):
net = content["data"][0]
del net["org"]
- r = self.client.put("/api/net/{}".format(net["id"]), net,
- format="json")
+ r = self.client.put("/api/net/{}".format(net["id"]), net, format="json")
content = json.loads(r.content)
if error:
assert r.status_code == 400
@@ -61,8 +59,7 @@ def _compat(self, ua_c, ua_be, error):
else:
assert r.status_code == 200
- r = self.client.delete("/api/net/{}".format(net["id"]), {},
- format="json")
+ r = self.client.delete("/api/net/{}".format(net["id"]), {}, format="json")
if error:
content = json.loads(r.content)
assert r.status_code == 400
diff --git a/tests/test_asn_automation.py b/tests/test_asn_automation.py
index 77644fb0..d94169e9 100644
--- a/tests/test_asn_automation.py
+++ b/tests/test_asn_automation.py
@@ -11,8 +11,9 @@
from util import SettingsCase
ERR_COULD_NOT_GET_RIR_ENTRY = "RDAP Lookup Error: Test Not Found"
-ERR_BOGON_ASN = "RDAP Lookup Error: ASNs in this range are not allowed " \
- "in this environment"
+ERR_BOGON_ASN = (
+ "RDAP Lookup Error: ASNs in this range are not allowed " "in this environment"
+)
RdapLookup_get_asn = pdbinet.RdapLookup.get_asn
@@ -31,9 +32,9 @@ def setup_module(module):
ASN_RANGE_OVERRIDE = range(9000000, 9000999)
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "api",
- "rdap_override.json"), "r") as fh:
+ os.path.join(os.path.dirname(__file__), "data", "api", "rdap_override.json"),
+ "r",
+ ) as fh:
pdbinet.RdapLookup.override_result = json.load(fh)
def get_asn(self, asn):
@@ -63,9 +64,11 @@ def setUpTestData(cls):
user_group = Group.objects.create(name="user")
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "api",
- "rdap_override.json"), "r") as fh:
+ os.path.join(
+ os.path.dirname(__file__), "data", "api", "rdap_override.json"
+ ),
+ "r",
+ ) as fh:
data = json.load(fh)
cls.rdap_63311 = pdbinet.RdapAsn(data)
cls.rdap_63311_no_name = pdbinet.RdapAsn(data)
@@ -76,24 +79,28 @@ def setUpTestData(cls):
cls.ticket = {}
for ticket_name in [
- "asnauto-9000001-org-net-created.txt",
- "asnauto-9000001-user-granted-ownership.txt",
- "asnauto-9000002-user-requested-ownership.txt"
+ "asnauto-9000001-org-net-created.txt",
+ "asnauto-9000001-user-granted-ownership.txt",
+ "asnauto-9000002-user-requested-ownership.txt",
]:
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "deskpro",
- ticket_name), "r") as fh:
+ os.path.join(os.path.dirname(__file__), "data", "deskpro", ticket_name),
+ "r",
+ ) as fh:
cls.ticket[ticket_name] = fh.read()
- cls.base_org = models.Organization.objects.create(
- name="ASN Automation Tests")
+ cls.base_org = models.Organization.objects.create(name="ASN Automation Tests")
- for username, email in [("user_a", "Neteng@20c.com"),
- ("user_b", "neteng@other.com"),
- ("user_c", "other@20c.com")]:
- setattr(cls, username,
- models.User.objects.create_user(username, email, username))
+ for username, email in [
+ ("user_a", "Neteng@20c.com"),
+ ("user_b", "neteng@other.com"),
+ ("user_c", "other@20c.com"),
+ ]:
+ setattr(
+ cls,
+ username,
+ models.User.objects.create_user(username, email, username),
+ )
getattr(cls, username).set_password(username)
cls.base_org.usergroup.user_set.add(getattr(cls, username))
user_group.user_set.add(getattr(cls, username))
@@ -102,27 +109,29 @@ def setUp(self):
self.factory = RequestFactory()
def test_org_create_from_rdap(self):
- org, created = models.Organization.create_from_rdap(
- self.rdap_63311, 63311)
+ org, created = models.Organization.create_from_rdap(self.rdap_63311, 63311)
self.assertEqual(org.name, "20C, LLC")
- org_2, created = models.Organization.create_from_rdap(
- self.rdap_63311, 63311)
+ org_2, created = models.Organization.create_from_rdap(self.rdap_63311, 63311)
self.assertEqual(org_2.id, org.id)
org, created = models.Organization.create_from_rdap(
- self.rdap_63311_no_name, 63311)
+ self.rdap_63311_no_name, 63311
+ )
self.assertEqual(org.name, "AS63311")
def test_net_create_from_rdap(self):
- net, created = models.Network.create_from_rdap(self.rdap_63311, 63311,
- self.base_org)
+ net, created = models.Network.create_from_rdap(
+ self.rdap_63311, 63311, self.base_org
+ )
self.assertEqual(net.name, "AS-20C")
- net, created = models.Network.create_from_rdap(self.rdap_63311, 63312,
- self.base_org)
+ net, created = models.Network.create_from_rdap(
+ self.rdap_63311, 63312, self.base_org
+ )
self.assertEqual(net.name, "AS-20C !")
- net, created = models.Network.create_from_rdap(self.rdap_63311_no_name,
- 63313, self.base_org)
+ net, created = models.Network.create_from_rdap(
+ self.rdap_63311_no_name, 63313, self.base_org
+ )
self.assertEqual(net.name, "AS63313")
def test_validate_rdap_relationship(self):
@@ -142,9 +151,7 @@ def test_affiliate(self):
asn_fail = 890000
# test 1: test affiliation to asn that has no RiR entry
- request = self.factory.post("/affiliate-to-org", data={
- "asn": asn_fail
- })
+ request = self.factory.post("/affiliate-to-org", data={"asn": asn_fail})
request.user = self.user_a
request._dont_enforce_csrf_checks = True
resp = json.loads(pdbviews.view_affiliate_to_org(request).content)
@@ -160,34 +167,31 @@ def test_affiliate(self):
# check that support tickets were created
ticket = models.DeskProTicket.objects.get(
- subject=
- "[test][ASNAUTO] Organization 'ORG AS9000001', Network 'AS9000001' created"
+ subject="[test][ASNAUTO] Organization 'ORG AS9000001', Network 'AS9000001' created"
+ )
+ self.assertEqual(
+ ticket.body, self.ticket["asnauto-9000001-org-net-created.txt"]
)
- self.assertEqual(ticket.body,
- self.ticket["asnauto-9000001-org-net-created.txt"])
ticket = models.DeskProTicket.objects.get(
- subject=
- "[test][ASNAUTO] Ownership claim granted to Org 'ORG AS9000001' for user 'user_a'"
+ subject="[test][ASNAUTO] Ownership claim granted to Org 'ORG AS9000001' for user 'user_a'"
)
self.assertEqual(
- ticket.body,
- self.ticket["asnauto-9000001-user-granted-ownership.txt"])
+ ticket.body, self.ticket["asnauto-9000001-user-granted-ownership.txt"]
+ )
net = models.Network.objects.get(asn=asn_ok)
self.assertEqual(net.name, "AS%d" % asn_ok)
self.assertEqual(net.org.name, "ORG AS%d" % asn_ok)
self.assertEqual(
- self.user_a.groups.filter(
- name=net.org.admin_usergroup.name).exists(), True)
+ self.user_a.groups.filter(name=net.org.admin_usergroup.name).exists(), True
+ )
self.assertEqual(net.status, "ok")
self.assertEqual(net.org.status, "ok")
# test 3: test affiliation to asn that hsa RiR entry and user relationship
# cannot be verifiedi (ASN 9000002)
- request = self.factory.post("/affiliate-to-org", data={
- "asn": asn_ok_b
- })
+ request = self.factory.post("/affiliate-to-org", data={"asn": asn_ok_b})
request.user = self.user_b
request._dont_enforce_csrf_checks = True
resp = json.loads(pdbviews.view_affiliate_to_org(request).content)
@@ -195,18 +199,18 @@ def test_affiliate(self):
# check that support tickets were created
ticket = models.DeskProTicket.objects.get(
- subject=
- "[test]User user_b wishes to request ownership of ORG AS9000002")
+ subject="[test]User user_b wishes to request ownership of ORG AS9000002"
+ )
self.assertEqual(
- ticket.body,
- self.ticket["asnauto-9000002-user-requested-ownership.txt"])
+ ticket.body, self.ticket["asnauto-9000002-user-requested-ownership.txt"]
+ )
net = models.Network.objects.get(asn=asn_ok_b)
self.assertEqual(net.name, "AS%d" % asn_ok_b)
self.assertEqual(net.org.name, "ORG AS%d" % asn_ok_b)
self.assertEqual(
- self.user_b.groups.filter(
- name=net.org.admin_usergroup.name).exists(), False)
+ self.user_b.groups.filter(name=net.org.admin_usergroup.name).exists(), False
+ )
self.assertEqual(net.status, "ok")
self.assertEqual(net.org.status, "ok")
@@ -215,27 +219,27 @@ def test_affiliate_to_bogon_asn(self):
tests affiliation with non-existant asn
"""
asns = []
- for a,b in pdbinet.BOGON_ASN_RANGES:
- asns.extend([a,b])
+ for a, b in pdbinet.BOGON_ASN_RANGES:
+ asns.extend([a, b])
for asn in asns:
- request = self.factory.post("/affiliate-to-org", data={
- "asn": asn})
+ request = self.factory.post("/affiliate-to-org", data={"asn": asn})
request.user = self.user_a
request._dont_enforce_csrf_checks = True
resp = json.loads(pdbviews.view_affiliate_to_org(request).content)
self.assertEqual(resp.get("asn"), ERR_BOGON_ASN)
-
def test_claim_ownership(self):
"""
tests ownership to org via asn RiR validation
"""
org = models.Organization.objects.create(
- status="ok", name="test_claim_ownership ORG")
+ status="ok", name="test_claim_ownership ORG"
+ )
net = models.Network.objects.create(
- status="ok", name="test_claim_ownership NET", asn=9000100, org=org)
+ status="ok", name="test_claim_ownership NET", asn=9000100, org=org
+ )
request = self.factory.post("/request-ownership", data={"id": org.id})
request.user = self.user_a
@@ -245,17 +249,19 @@ def test_claim_ownership(self):
self.assertEqual(resp.get("status"), "ok")
self.assertEqual(resp.get("ownership_status"), "approved")
self.assertEqual(
- self.user_a.groups.filter(name=org.admin_usergroup.name).exists(),
- True)
+ self.user_a.groups.filter(name=org.admin_usergroup.name).exists(), True
+ )
def test_claim_ownership_validation_failure(self):
"""
test failure to claim ownership to org via asn RiR validation
"""
org = models.Organization.objects.create(
- status="ok", name="test_claim_ownership ORG")
+ status="ok", name="test_claim_ownership ORG"
+ )
net = models.Network.objects.create(
- status="ok", name="test_claim_ownership NET", asn=9000100, org=org)
+ status="ok", name="test_claim_ownership NET", asn=9000100, org=org
+ )
request = self.factory.post("/request-ownership", data={"id": org.id})
request.user = self.user_b
@@ -265,11 +271,12 @@ def test_claim_ownership_validation_failure(self):
self.assertEqual(resp.get("status"), "ok")
self.assertEqual(resp.get("ownership_status"), "pending")
self.assertEqual(
- self.user_b.groups.filter(name=org.admin_usergroup.name).exists(),
- False)
+ self.user_b.groups.filter(name=org.admin_usergroup.name).exists(), False
+ )
+
class TestTutorialMode(SettingsCase):
- settings = {"TUTORIAL_MODE":True}
+ settings = {"TUTORIAL_MODE": True}
def setUp(self):
super(TestTutorialMode, self).setUp()
@@ -280,18 +287,17 @@ def test_affiliate_to_bogon_asn(self):
tests affiliation with non-existant bogon asn
with tutorial mode enabled those should be allowed
"""
- user = get_user_model().objects.create_user("user_a", "user_a@localhost", "user_a")
+ user = get_user_model().objects.create_user(
+ "user_a", "user_a@localhost", "user_a"
+ )
asns = []
- for a,b in pdbinet.TUTORIAL_ASN_RANGES:
- asns.extend([a,b])
+ for a, b in pdbinet.TUTORIAL_ASN_RANGES:
+ asns.extend([a, b])
for asn in asns:
- request = self.factory.post("/affiliate-to-org", data={
- "asn": asn})
+ request = self.factory.post("/affiliate-to-org", data={"asn": asn})
request.user = user
request._dont_enforce_csrf_checks = True
resp = json.loads(pdbviews.view_affiliate_to_org(request).content)
self.assertEqual(resp.get("status"), "ok")
-
-
diff --git a/tests/test_autocomplete.py b/tests/test_autocomplete.py
index b527cc5f..9e24ee05 100644
--- a/tests/test_autocomplete.py
+++ b/tests/test_autocomplete.py
@@ -14,8 +14,9 @@ class TestAutocomplete(ClientCase):
@classmethod
def setUpTestData(cls):
super(TestAutocomplete, cls).setUpTestData()
- cls.staff_user = User.objects.create_user("staff", "staff@localhost",
- "staff", is_staff=True)
+ cls.staff_user = User.objects.create_user(
+ "staff", "staff@localhost", "staff", is_staff=True
+ )
def setUp(self):
self.factory = RequestFactory()
diff --git a/tests/test_cmd_renumber_lans.py b/tests/test_cmd_renumber_lans.py
index a5b0b5c8..25594e1f 100644
--- a/tests/test_cmd_renumber_lans.py
+++ b/tests/test_cmd_renumber_lans.py
@@ -10,7 +10,6 @@
class TestRenumberLans(ClientCase):
-
@classmethod
def setUpTestData(cls):
super(TestRenumberLans, cls).setUpTestData()
@@ -25,17 +24,28 @@ def test_run(self):
ixlan = ix.ixlan_set_active.all().first()
- call_command("pdb_renumber_lans", ix=1, old=u"206.223.116.0/23", new=u"206.223.110.0/23", commit=True)
+ call_command(
+ "pdb_renumber_lans",
+ ix=1,
+ old=u"206.223.116.0/23",
+ new=u"206.223.110.0/23",
+ commit=True,
+ )
assert ixlan.ixpfx_set.get(id=1).prefix.compressed == u"206.223.110.0/23"
assert ixlan.netixlan_set.get(id=1).ipaddr4.compressed == u"206.223.110.101"
- call_command("pdb_renumber_lans", ix=1, old=u"2001:504:0:1::/64", new=u"2001:504:0:2::/64", commit=True)
+ call_command(
+ "pdb_renumber_lans",
+ ix=1,
+ old=u"2001:504:0:1::/64",
+ new=u"2001:504:0:2::/64",
+ commit=True,
+ )
assert ixlan.ixpfx_set.get(id=2).prefix.compressed == u"2001:504:0:2::/64"
assert ixlan.netixlan_set.get(id=1).ipaddr6.compressed == u"2001:504:0:2::65"
-
def test_skip_deleted(self):
"""
test that `pdb_renumber_lans` command skips deleted prefixes and
@@ -52,19 +62,28 @@ def test_skip_deleted(self):
for netixlan in ixlan.netixlan_set.all():
netixlan.delete()
-
- call_command("pdb_renumber_lans", ix=1, old=u"206.223.116.0/23", new=u"206.223.110.0/23", commit=True)
+ call_command(
+ "pdb_renumber_lans",
+ ix=1,
+ old=u"206.223.116.0/23",
+ new=u"206.223.110.0/23",
+ commit=True,
+ )
assert ixlan.ixpfx_set.get(id=1).prefix.compressed == u"206.223.116.0/23"
assert ixlan.netixlan_set.get(id=1).ipaddr4.compressed == u"206.223.116.101"
- call_command("pdb_renumber_lans", ix=1, old=u"2001:504:0:1::/64", new=u"2001:504:0:2::/64", commit=True)
+ call_command(
+ "pdb_renumber_lans",
+ ix=1,
+ old=u"2001:504:0:1::/64",
+ new=u"2001:504:0:2::/64",
+ commit=True,
+ )
assert ixlan.ixpfx_set.get(id=2).prefix.compressed == u"2001:504:0:1::/64"
assert ixlan.netixlan_set.get(id=1).ipaddr6.compressed == u"2001:504:0:1::65"
-
-
def test_ignore_diff_address_space(self):
""""
Test that `pdb_renumber_lans` command soft errors on netixlans that
@@ -77,24 +96,39 @@ def test_ignore_diff_address_space(self):
out = StringIO.StringIO()
- call_command("pdb_renumber_lans", ix=1, old=u"206.223.114.0/23",
- new=u"206.223.110.0/23", commit=True, stdout=out)
+ call_command(
+ "pdb_renumber_lans",
+ ix=1,
+ old=u"206.223.114.0/23",
+ new=u"206.223.110.0/23",
+ commit=True,
+ stdout=out,
+ )
assert ixlan.ixpfx_set.get(id=1).prefix.compressed == u"206.223.116.0/23"
assert ixlan.netixlan_set.get(id=1).ipaddr4.compressed == u"206.223.116.101"
output = out.getvalue()
- assert output.find("[error] 206.223.116.101: Ip address not within old prefix") > -1
-
- call_command("pdb_renumber_lans", ix=1, old=u"2001:504:0:3::/64",
- new=u"2001:504:0:2::/64", commit=True, stdout=out)
+ assert (
+ output.find("[error] 206.223.116.101: Ip address not within old prefix")
+ > -1
+ )
+
+ call_command(
+ "pdb_renumber_lans",
+ ix=1,
+ old=u"2001:504:0:3::/64",
+ new=u"2001:504:0:2::/64",
+ commit=True,
+ stdout=out,
+ )
assert ixlan.ixpfx_set.get(id=2).prefix.compressed == u"2001:504:0:1::/64"
assert ixlan.netixlan_set.get(id=1).ipaddr6.compressed == u"2001:504:0:1::65"
output = out.getvalue()
- assert output.find("[error] 2001:504:0:1::65: Ip address not within old prefix") > -1
-
-
-
+ assert (
+ output.find("[error] 2001:504:0:1::65: Ip address not within old prefix")
+ > -1
+ )
diff --git a/tests/test_cors.py b/tests/test_cors.py
index 78811077..98b12af5 100644
--- a/tests/test_cors.py
+++ b/tests/test_cors.py
@@ -2,9 +2,13 @@
from peeringdb_server.models import Organization, User
from util import ClientCase
from corsheaders.middleware import (
- ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS,
- ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
- ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_MAX_AGE)
+ ACCESS_CONTROL_ALLOW_CREDENTIALS,
+ ACCESS_CONTROL_ALLOW_HEADERS,
+ ACCESS_CONTROL_ALLOW_METHODS,
+ ACCESS_CONTROL_ALLOW_ORIGIN,
+ ACCESS_CONTROL_EXPOSE_HEADERS,
+ ACCESS_CONTROL_MAX_AGE,
+)
class CorsTest(ClientCase):
@@ -21,8 +25,7 @@ def assert_cors_allowed(self, url, method):
self.assertIn(resp.status_code, [200, 301])
self.assertIn(ACCESS_CONTROL_ALLOW_METHODS, resp)
self.assertIn(ACCESS_CONTROL_ALLOW_ORIGIN, resp)
- self.assertIn(method.upper(),
- resp[ACCESS_CONTROL_ALLOW_METHODS].split(", "))
+ self.assertIn(method.upper(), resp[ACCESS_CONTROL_ALLOW_METHODS].split(", "))
self.assertIn("origin", resp[ACCESS_CONTROL_ALLOW_HEADERS].split(", "))
self.assertEqual(resp[ACCESS_CONTROL_ALLOW_ORIGIN], self.test_origin)
@@ -31,8 +34,9 @@ def assert_cors_denied(self, url, method):
self.assertIn(resp.status_code, [200, 301])
if ACCESS_CONTROL_ALLOW_METHODS in resp:
- self.assertNotIn(method.upper(),
- resp[ACCESS_CONTROL_ALLOW_METHODS].split(", "))
+ self.assertNotIn(
+ method.upper(), resp[ACCESS_CONTROL_ALLOW_METHODS].split(", ")
+ )
def test_cors_GET(self):
self.assert_cors_allowed("/api", method="get")
diff --git a/tests/test_entity_view.py b/tests/test_entity_view.py
index f56c0028..8f632d28 100644
--- a/tests/test_entity_view.py
+++ b/tests/test_entity_view.py
@@ -23,44 +23,54 @@ def setUpTestData(cls):
user_group = Group.objects.create(name="user")
cls.guest_user = models.User.objects.create_user(
- "guest", "guest@localhost", "guest")
+ "guest", "guest@localhost", "guest"
+ )
cls.guest_user.set_password("guest")
guest_group.user_set.add(cls.guest_user)
nsp.models.GroupPermission.objects.create(
- group=guest_group, namespace="peeringdb.organization",
- permissions=0x01)
+ group=guest_group, namespace="peeringdb.organization", permissions=0x01
+ )
nsp.models.GroupPermission.objects.create(
- group=user_group, namespace="peeringdb.organization",
- permissions=0x01)
+ group=user_group, namespace="peeringdb.organization", permissions=0x01
+ )
nsp.models.GroupPermission.objects.create(
group=user_group,
namespace="peeringdb.organization.*.network.*.poc_set.users",
- permissions=0x01)
+ permissions=0x01,
+ )
nsp.models.GroupPermission.objects.create(
group=guest_group,
namespace="peeringdb.organization.*.network.*.poc_set.public",
- permissions=0x01)
+ permissions=0x01,
+ )
# create test users
for name in [
- "org_admin", "user_a", "user_b", "user_c", "user_d", "user_e",
- "user_f"
+ "org_admin",
+ "user_a",
+ "user_b",
+ "user_c",
+ "user_d",
+ "user_e",
+ "user_f",
]:
- setattr(cls, name,
- models.User.objects.create_user(
- name, "%s@localhost" % name, name))
+ setattr(
+ cls,
+ name,
+ models.User.objects.create_user(name, "%s@localhost" % name, name),
+ )
getattr(cls, name).set_password(name)
user_group.user_set.add(getattr(cls, name))
# create test org
- cls.org = models.Organization.objects.create(name="Test org",
- status="ok")
+ cls.org = models.Organization.objects.create(name="Test org", status="ok")
cls.org_other = models.Organization.objects.create(
- name="Test org other", status="ok")
+ name="Test org other", status="ok"
+ )
# create test entities
for tag in cls.entities:
@@ -120,14 +130,29 @@ def setUpTestData(cls):
ViewTestCase.setUpTestData()
# Create PoCs
models.NetworkContact.objects.create(
- network=cls.net, visible="Users", name="Contact Users",
- phone="12345", email="a@a.a", status="ok")
+ network=cls.net,
+ visible="Users",
+ name="Contact Users",
+ phone="12345",
+ email="a@a.a",
+ status="ok",
+ )
models.NetworkContact.objects.create(
- network=cls.net, visible="Public", name="Contact Public",
- phone="12345", email="a@a.a", status="ok")
+ network=cls.net,
+ visible="Public",
+ name="Contact Public",
+ phone="12345",
+ email="a@a.a",
+ status="ok",
+ )
models.NetworkContact.objects.create(
- network=cls.net, visible="Private", name="Contact Private",
- phone="12345", email="a@a.a", status="ok")
+ network=cls.net,
+ visible="Private",
+ name="Contact Private",
+ phone="12345",
+ email="a@a.a",
+ status="ok",
+ )
def test_view(self):
self.run_view_test("net")
@@ -174,4 +199,4 @@ def test_search_asn_redirect(self):
for q in ["as1", "asn1", "AS1", "ASN1"]:
resp = c.get("/search?q={}".format(q), follow=True)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.redirect_chain, [('/net/1', 302)])
+ self.assertEqual(resp.redirect_chain, [("/net/1", 302)])
diff --git a/tests/test_exporters.py b/tests/test_exporters.py
index e58c0878..7ec0061e 100644
--- a/tests/test_exporters.py
+++ b/tests/test_exporters.py
@@ -10,9 +10,15 @@
from util import ClientCase
-from peeringdb_server.models import (Organization, Network, InternetExchange,
- Facility, NetworkFacility, NetworkIXLan,
- IXLan)
+from peeringdb_server.models import (
+ Organization,
+ Network,
+ InternetExchange,
+ Facility,
+ NetworkFacility,
+ NetworkIXLan,
+ IXLan,
+)
class AdvancedSearchExportTest(ClientCase):
@@ -35,54 +41,79 @@ def setUpTestData(cls):
# create networks
cls.net = [
Network.objects.create(
- name="Network {}".format(i), status="ok",
- aka="AKA {}".format(i), policy_general="Open",
- info_traffic="0-20 Mbps", asn=i, org=cls.org)
+ name="Network {}".format(i),
+ status="ok",
+ aka="AKA {}".format(i),
+ policy_general="Open",
+ info_traffic="0-20 Mbps",
+ asn=i,
+ org=cls.org,
+ )
for i in entity_count
]
# create exchanges
cls.ix = [
InternetExchange.objects.create(
- name="Exchange {}".format(i), media="Ethernet",
- country=countries[i - 1], city="City {}".format(i),
- status="ok", org=cls.org) for i in entity_count
+ name="Exchange {}".format(i),
+ media="Ethernet",
+ country=countries[i - 1],
+ city="City {}".format(i),
+ status="ok",
+ org=cls.org,
+ )
+ for i in entity_count
]
# create facilities
cls.fac = [
Facility.objects.create(
- name="Facility {}".format(i), status="ok",
- city="City {}".format(i), clli="CLLI{}".format(i),
- state="State {}".format(i), npanxx="{}-{}".format(
- i, i), country=countries[i - 1], zipcode=i, org=cls.org)
+ name="Facility {}".format(i),
+ status="ok",
+ city="City {}".format(i),
+ clli="CLLI{}".format(i),
+ state="State {}".format(i),
+ npanxx="{}-{}".format(i, i),
+ country=countries[i - 1],
+ zipcode=i,
+ org=cls.org,
+ )
for i in entity_count
]
# create network facility relationships
cls.netfac = [
NetworkFacility.objects.create(
- network=cls.net[i - 1], facility=cls.fac[i - 1], status="ok")
+ network=cls.net[i - 1], facility=cls.fac[i - 1], status="ok"
+ )
for i in entity_count
]
# create ixlans
cls.ixlan = [
- IXLan.objects.create(ix=cls.ix[i - 1], status="ok")
- for i in entity_count
+ IXLan.objects.create(ix=cls.ix[i - 1], status="ok") for i in entity_count
]
# create netixlans
cls.netixlan = [
- NetworkIXLan.objects.create(ixlan=cls.ixlan[i - 1],
- network=cls.net[i - 1], asn=i, speed=0,
- status="ok") for i in entity_count
+ NetworkIXLan.objects.create(
+ ixlan=cls.ixlan[i - 1],
+ network=cls.net[i - 1],
+ asn=i,
+ speed=0,
+ status="ok",
+ )
+ for i in entity_count
]
def expected_data(self, tag, fmt):
path = os.path.join(
- os.path.dirname(__file__), "data", "export", "advancedsearch",
- "{}.{}".format(tag, fmt))
+ os.path.dirname(__file__),
+ "data",
+ "export",
+ "advancedsearch",
+ "{}.{}".format(tag, fmt),
+ )
with open(path, "r") as fh:
data = fh.read().rstrip()
return data
@@ -90,78 +121,77 @@ def expected_data(self, tag, fmt):
def test_export_net_json(self):
""" test json export of network search """
client = Client()
- response = client.get(
- "/export/advanced-search/net/json?name_search=Network")
+ response = client.get("/export/advanced-search/net/json?name_search=Network")
self.assertEqual(
- json.loads(response.content),
- json.loads(self.expected_data("net", "json")))
+ json.loads(response.content), json.loads(self.expected_data("net", "json"))
+ )
def test_export_net_json_pretty(self):
""" test pretty json export of network search """
client = Client()
response = client.get(
- "/export/advanced-search/net/json-pretty?name_search=Network")
- self.assertEqual(response.content,
- self.expected_data("net", "jsonpretty"))
+ "/export/advanced-search/net/json-pretty?name_search=Network"
+ )
+ self.assertEqual(response.content, self.expected_data("net", "jsonpretty"))
def test_export_net_csv(self):
""" test csv export of network search """
client = Client()
- response = client.get(
- "/export/advanced-search/net/csv?name_search=Network")
+ response = client.get("/export/advanced-search/net/csv?name_search=Network")
self.assertEqual(
response.content.replace("\r\n", "\n").rstrip(),
- self.expected_data("net", "csv"))
+ self.expected_data("net", "csv"),
+ )
def test_export_fac_json(self):
""" test json export of facility search """
client = Client()
response = client.get(
- "/export/advanced-search/fac/json?name__contains=Facility")
+ "/export/advanced-search/fac/json?name__contains=Facility"
+ )
self.assertEqual(
- json.loads(response.content),
- json.loads(self.expected_data("fac", "json")))
+ json.loads(response.content), json.loads(self.expected_data("fac", "json"))
+ )
def test_export_fac_json_pretty(self):
""" test pretty json export of facility search """
client = Client()
response = client.get(
- "/export/advanced-search/fac/json-pretty?name__contains=Facility")
- self.assertEqual(response.content,
- self.expected_data("fac", "jsonpretty"))
+ "/export/advanced-search/fac/json-pretty?name__contains=Facility"
+ )
+ self.assertEqual(response.content, self.expected_data("fac", "jsonpretty"))
def test_export_fac_csv(self):
""" test csv export of facility search """
client = Client()
- response = client.get(
- "/export/advanced-search/fac/csv?name__contains=Facility")
+ response = client.get("/export/advanced-search/fac/csv?name__contains=Facility")
self.assertEqual(
response.content.replace("\r\n", "\n").rstrip(),
- self.expected_data("fac", "csv"))
+ self.expected_data("fac", "csv"),
+ )
def test_export_ix_json(self):
""" test json export of exchange search """
client = Client()
- response = client.get(
- "/export/advanced-search/ix/json?name__contains=Exchange")
+ response = client.get("/export/advanced-search/ix/json?name__contains=Exchange")
self.assertEqual(
- json.loads(response.content),
- json.loads(self.expected_data("ix", "json")))
+ json.loads(response.content), json.loads(self.expected_data("ix", "json"))
+ )
def test_export_ix_json_pretty(self):
""" test pretty json export of exchange search """
client = Client()
response = client.get(
- "/export/advanced-search/ix/json-pretty?name__contains=Exchange")
+ "/export/advanced-search/ix/json-pretty?name__contains=Exchange"
+ )
- self.assertEqual(response.content,
- self.expected_data("ix", "jsonpretty"))
+ self.assertEqual(response.content, self.expected_data("ix", "jsonpretty"))
def test_export_ix_csv(self):
""" test csv export of exchange search """
client = Client()
- response = client.get(
- "/export/advanced-search/ix/csv?name__contains=Exchange")
+ response = client.get("/export/advanced-search/ix/csv?name__contains=Exchange")
self.assertEqual(
response.content.replace("\r\n", "\n").rstrip(),
- self.expected_data("ix", "csv"))
+ self.expected_data("ix", "csv"),
+ )
diff --git a/tests/test_geocode.py b/tests/test_geocode.py
index c4bcc420..f5464c38 100644
--- a/tests/test_geocode.py
+++ b/tests/test_geocode.py
@@ -20,26 +20,43 @@ class ViewTestCase(TestCase):
def setUpTestData(cls):
# create organizations
- cls.organizations = dict((k,
- models.Organization.objects.create(
- name="Geocode Org %s" % k, status="ok"))
- for k in ["a", "b", "c", "d"])
+ cls.organizations = dict(
+ (
+ k,
+ models.Organization.objects.create(
+ name="Geocode Org %s" % k, status="ok"
+ ),
+ )
+ for k in ["a", "b", "c", "d"]
+ )
# create facilities
cls.facilities = dict(
- (k,
- models.Facility.objects.create(
- name=u"Geocode Fac {}".format(k), status="ok", org=cls.organizations[
- k], address1="Some street", address2=k, city="Chicago",
- country="US", state="IL", zipcode="1234", latitude=1.23,
- longitude=-1.23, geocode_status=True))
- for k in ["a", "b", "c", "d"])
+ (
+ k,
+ models.Facility.objects.create(
+ name=u"Geocode Fac {}".format(k),
+ status="ok",
+ org=cls.organizations[k],
+ address1="Some street",
+ address2=k,
+ city="Chicago",
+ country="US",
+ state="IL",
+ zipcode="1234",
+ latitude=1.23,
+ longitude=-1.23,
+ geocode_status=True,
+ ),
+ )
+ for k in ["a", "b", "c", "d"]
+ )
def test_base(self):
- self.assertEqual(self.facilities["a"].geocode_address,
- u"Some street a, Chicago, IL 1234")
- self.assertEqual(self.facilities["a"].geocode_coordinates,
- (1.23, -1.23))
+ self.assertEqual(
+ self.facilities["a"].geocode_address, u"Some street a, Chicago, IL 1234"
+ )
+ self.assertEqual(self.facilities["a"].geocode_coordinates, (1.23, -1.23))
def test_change(self):
self.assertEqual(self.facilities["b"].geocode_status, True)
@@ -51,7 +68,7 @@ def test_change(self):
self.facilities["c"].save()
self.assertEqual(self.facilities["c"].geocode_status, True)
self.assertEqual(self.facilities["d"].geocode_status, True)
- self.facilities["d"].website = 'http://www.test.com'
+ self.facilities["d"].website = "http://www.test.com"
self.facilities["d"].save()
self.assertEqual(self.facilities["d"].geocode_status, True)
diff --git a/tests/test_inet.py b/tests/test_inet.py
index 3cb8b9f9..211b6980 100644
--- a/tests/test_inet.py
+++ b/tests/test_inet.py
@@ -2,11 +2,7 @@
import pytest_filedata
import ipaddress
-from peeringdb_server.inet import (
- RdapLookup,
- RdapNotFoundError,
- renumber_ipaddress
-)
+from peeringdb_server.inet import RdapLookup, RdapNotFoundError, renumber_ipaddress
def test_rdap_asn_lookup(rdap):
@@ -36,7 +32,7 @@ def test_mocker(rdap):
@pytest_filedata.RequestsData("rdap")
def test_arin0(rdap):
asn = rdap.get_asn(63311)
- assert asn.emails == ['neteng@20c.com']
+ assert asn.emails == ["neteng@20c.com"]
def test_recurse_contacts(rdap):
@@ -45,6 +41,7 @@ def test_recurse_contacts(rdap):
assert len(asn.emails) > 1
assert len(rdap.history) > 1
+
def test_renumber_ipaddress():
ip4 = renumber_ipaddress(
ipaddress.ip_address(u"206.41.110.48"),
@@ -82,5 +79,3 @@ def test_renumber_ipaddress():
ipaddress.ip_network(u"206.41.0.0/21"),
ipaddress.ip_network(u"206.41.111.0/24"),
)
-
-
diff --git a/tests/test_inet_parse.py b/tests/test_inet_parse.py
index b685c10a..83c5231f 100644
--- a/tests/test_inet_parse.py
+++ b/tests/test_inet_parse.py
@@ -5,14 +5,17 @@
def assert_parsed(data, parsed):
# dump in json format for easily adding expected
- print("echo \\\n'{}'\\\n > {}/{}.expected".format(
- data.dumps(parsed), data.path, data.name))
+ print(
+ "echo \\\n'{}'\\\n > {}/{}.expected".format(
+ data.dumps(parsed), data.path, data.name
+ )
+ )
assert data.expected == parsed
@pytest_filedata.RequestsData("rdap", real_http=True)
def test_rdap_asn_lookup(rdap, data_rdap_autnum):
print(data_rdap_autnum.name)
- #asn = rdap.get_asn(205726)
+ # asn = rdap.get_asn(205726)
asn = rdap.get_asn(data_rdap_autnum.name)
assert_parsed(data_rdap_autnum, asn.parsed())
diff --git a/tests/test_ixf_member_import.py b/tests/test_ixf_member_import.py
index 6267ce4b..f340a559 100644
--- a/tests/test_ixf_member_import.py
+++ b/tests/test_ixf_member_import.py
@@ -12,14 +12,22 @@
from django.core.management import call_command
from peeringdb_server.models import (
- Organization, Network, NetworkIXLan, IXLan, IXLanPrefix, InternetExchange,
- IXLanIXFMemberImportAttempt, IXLanIXFMemberImportLog,
- IXLanIXFMemberImportLogEntry, User)
+ Organization,
+ Network,
+ NetworkIXLan,
+ IXLan,
+ IXLanPrefix,
+ InternetExchange,
+ IXLanIXFMemberImportAttempt,
+ IXLanIXFMemberImportLog,
+ IXLanIXFMemberImportLogEntry,
+ User,
+)
from peeringdb_server.import_views import (
view_import_ixlan_ixf_preview,
view_import_net_ixf_preview,
view_import_net_ixf_postmortem,
- )
+)
from peeringdb_server import ixf
from util import ClientCase
@@ -45,9 +53,14 @@ def setUpTestData(cls):
# load json members list data to test against
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "json_members_list",
- "members.{}.json".format(cls.version)), "r") as fh:
+ os.path.join(
+ os.path.dirname(__file__),
+ "data",
+ "json_members_list",
+ "members.{}.json".format(cls.version),
+ ),
+ "r",
+ ) as fh:
cls.json_data = json.load(fh)
with reversion.create_revision():
@@ -58,89 +71,145 @@ def setUpTestData(cls):
# create exchange(s)
cls.entities["ix"] = [
- InternetExchange.objects.create(name="Test Exchange",
- org=cls.entities["org"][0],
- status="ok")
+ InternetExchange.objects.create(
+ name="Test Exchange", org=cls.entities["org"][0], status="ok"
+ )
]
# create ixlan(s)
cls.entities["ixlan"] = [
IXLan.objects.create(ix=cls.entities["ix"][0], status="ok"),
IXLan.objects.create(ix=cls.entities["ix"][0], status="ok"),
- IXLan.objects.create(ix=cls.entities["ix"][0], status="ok")
+ IXLan.objects.create(ix=cls.entities["ix"][0], status="ok"),
]
# create ixlan prefix(s)
cls.entities["ixpfx"] = [
IXLanPrefix.objects.create(
- ixlan=cls.entities["ixlan"][0], status="ok",
- prefix="195.69.144.0/22", protocol="IPv4"),
+ ixlan=cls.entities["ixlan"][0],
+ status="ok",
+ prefix="195.69.144.0/22",
+ protocol="IPv4",
+ ),
IXLanPrefix.objects.create(
- ixlan=cls.entities["ixlan"][0], status="ok",
- prefix="2001:7f8:1::/64", protocol="IPv6"),
+ ixlan=cls.entities["ixlan"][0],
+ status="ok",
+ prefix="2001:7f8:1::/64",
+ protocol="IPv6",
+ ),
IXLanPrefix.objects.create(
- ixlan=cls.entities["ixlan"][1], status="ok",
- prefix="195.66.224.0/22", protocol="IPv4"),
+ ixlan=cls.entities["ixlan"][1],
+ status="ok",
+ prefix="195.66.224.0/22",
+ protocol="IPv4",
+ ),
IXLanPrefix.objects.create(
- ixlan=cls.entities["ixlan"][1], status="ok",
- prefix="2001:7f8:4::/64", protocol="IPv6")
+ ixlan=cls.entities["ixlan"][1],
+ status="ok",
+ prefix="2001:7f8:4::/64",
+ protocol="IPv6",
+ ),
]
# create network(s)
cls.entities["net"] = [
Network.objects.create(
- name="Netflix", org=cls.entities["org"][0], asn=2906,
- info_prefixes4=42, info_prefixes6=42,
- website="http://netflix.com/", policy_general="Open",
+ name="Netflix",
+ org=cls.entities["org"][0],
+ asn=2906,
+ info_prefixes4=42,
+ info_prefixes6=42,
+ website="http://netflix.com/",
+ policy_general="Open",
policy_url="https://www.netflix.com/openconnect/",
- allow_ixp_update=True, status="ok", irr_as_set="AS-NFLX"),
- Network.objects.create(name="Network with deleted netixlans",
- org=cls.entities["org"][0], asn=1001,
- allow_ixp_update=True, status="ok"),
+ allow_ixp_update=True,
+ status="ok",
+ irr_as_set="AS-NFLX",
+ ),
+ Network.objects.create(
+ name="Network with deleted netixlans",
+ org=cls.entities["org"][0],
+ asn=1001,
+ allow_ixp_update=True,
+ status="ok",
+ ),
Network.objects.create(
name="Network with allow ixp update off",
- org=cls.entities["org"][0], asn=1002, status="ok")
+ org=cls.entities["org"][0],
+ asn=1002,
+ status="ok",
+ ),
]
# create netixlans
cls.entities["netixlan"] = [
NetworkIXLan.objects.create(
network=cls.entities["net"][1],
- ixlan=cls.entities["ixlan"][1], asn=1001, speed=10000,
- ipaddr4="195.69.146.250", ipaddr6=None, status="deleted"),
+ ixlan=cls.entities["ixlan"][1],
+ asn=1001,
+ speed=10000,
+ ipaddr4="195.69.146.250",
+ ipaddr6=None,
+ status="deleted",
+ ),
NetworkIXLan.objects.create(
network=cls.entities["net"][1],
- ixlan=cls.entities["ixlan"][1], asn=1001, speed=10000,
- ipaddr4=None, ipaddr6="2001:7f8:1::a500:2906:1",
- status="deleted"),
+ ixlan=cls.entities["ixlan"][1],
+ asn=1001,
+ speed=10000,
+ ipaddr4=None,
+ ipaddr6="2001:7f8:1::a500:2906:1",
+ status="deleted",
+ ),
NetworkIXLan.objects.create(
network=cls.entities["net"][0],
- ixlan=cls.entities["ixlan"][0], asn=2906, speed=10000,
- ipaddr4="195.69.146.249", ipaddr6=None, status="ok"),
+ ixlan=cls.entities["ixlan"][0],
+ asn=2906,
+ speed=10000,
+ ipaddr4="195.69.146.249",
+ ipaddr6=None,
+ status="ok",
+ ),
NetworkIXLan.objects.create(
network=cls.entities["net"][0],
- ixlan=cls.entities["ixlan"][0], asn=2906, speed=10000,
- ipaddr4="195.69.146.251", ipaddr6=None, status="ok"),
+ ixlan=cls.entities["ixlan"][0],
+ asn=2906,
+ speed=10000,
+ ipaddr4="195.69.146.251",
+ ipaddr6=None,
+ status="ok",
+ ),
NetworkIXLan.objects.create(
network=cls.entities["net"][0],
- ixlan=cls.entities["ixlan"][0], asn=2906, speed=20000, is_rs_peer=False,
- ipaddr4="195.69.147.251", ipaddr6=None, status="ok"),
+ ixlan=cls.entities["ixlan"][0],
+ asn=2906,
+ speed=20000,
+ is_rs_peer=False,
+ ipaddr4="195.69.147.251",
+ ipaddr6=None,
+ status="ok",
+ ),
NetworkIXLan.objects.create(
network=cls.entities["net"][0],
- ixlan=cls.entities["ixlan"][0], asn=1002, speed=10000,
- ipaddr4="195.69.147.252", ipaddr6=None, status="ok"),
+ ixlan=cls.entities["ixlan"][0],
+ asn=1002,
+ speed=10000,
+ ipaddr4="195.69.147.252",
+ ipaddr6=None,
+ status="ok",
+ ),
]
- cls.admin_user = User.objects.create_user("admin","admin@localhost","admin")
+ cls.admin_user = User.objects.create_user("admin", "admin@localhost", "admin")
cls.entities["org"][0].admin_usergroup.user_set.add(cls.admin_user)
-
-
def setUp(self):
self.ixf_importer = ixf.Importer()
def assertLog(self, log, expected):
- path = os.path.join(os.path.dirname(__file__), "data", "ixf", "logs", "{}.json".format(expected))
+ path = os.path.join(
+ os.path.dirname(__file__), "data", "ixf", "logs", "{}.json".format(expected)
+ )
with open(path, "r") as fh:
self.assertEqual(log, json.load(fh))
@@ -148,11 +217,12 @@ def test_update_from_ixf_ixp_member_list(self):
ixlan = self.entities["ixlan"][0]
n_deleted = self.entities["netixlan"][0]
n_deleted2 = self.entities["netixlan"][1]
- self.assertEqual(unicode(n_deleted.ipaddr4), u'195.69.146.250')
- self.assertEqual(
- unicode(n_deleted2.ipaddr6), u'2001:7f8:1::a500:2906:1')
+ self.assertEqual(unicode(n_deleted.ipaddr4), u"195.69.146.250")
+ self.assertEqual(unicode(n_deleted2.ipaddr6), u"2001:7f8:1::a500:2906:1")
self.assertEqual(ixlan.netixlan_set_active.count(), 4)
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
self.assertLog(log, "update_01")
self.assertEqual(len(netixlans), 5)
@@ -175,33 +245,42 @@ def test_update_from_ixf_ixp_member_list(self):
self.assertEqual(n.asn, 2906)
# test that inactive connections had no effect
- self.assertEqual(NetworkIXLan.objects.filter(ipaddr4="195.69.146.251", speed=10000, status="ok").count(), 1)
- self.assertEqual(NetworkIXLan.objects.filter(ipaddr4="195.69.146.252").count(), 0)
-
+ self.assertEqual(
+ NetworkIXLan.objects.filter(
+ ipaddr4="195.69.146.251", speed=10000, status="ok"
+ ).count(),
+ 1,
+ )
+ self.assertEqual(
+ NetworkIXLan.objects.filter(ipaddr4="195.69.146.252").count(), 0
+ )
- #self.assertEqual(IXLan.objects.get(id=ixlan.id).netixlan_set_active.count(), 2)
+ # self.assertEqual(IXLan.objects.get(id=ixlan.id).netixlan_set_active.count(), 2)
- #FIXME: this is not practical until
- #https://github.com/peeringdb/peeringdb/issues/90 is resolved
- #so skipping those tests right now
- #n_deleted.refresh_from_db()
- #n_deleted2.refresh_from_db()
- #self.assertEqual(n_deleted.ipaddr4, None)
- #self.assertEqual(n_deleted2.ipaddr6, None)
+ # FIXME: this is not practical until
+ # https://github.com/peeringdb/peeringdb/issues/90 is resolved
+ # so skipping those tests right now
+ # n_deleted.refresh_from_db()
+ # n_deleted2.refresh_from_db()
+ # self.assertEqual(n_deleted.ipaddr4, None)
+ # self.assertEqual(n_deleted2.ipaddr6, None)
def test_preview_from_ixf_ixp_member_list(self):
ixlan = self.entities["ixlan"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data, save=False)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data, save=False
+ )
self.assertLog(log, "preview_01")
-
def test_update_from_ixf_ixp_member_list_skip_prefix_mismatch(self):
"""
Here we test that entries with ipaddresses that cannot be validated
against any of the prefixes that exist on the ixlan get skipped
"""
ixlan = self.entities["ixlan"][1]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
self.assertLog(log, "skip_prefix_mismatch")
self.assertEqual(len(netixlans), 0)
@@ -212,11 +291,13 @@ def test_update_from_ixf_ixp_member_list_skip_missing_prefixes(self):
ixlan that does not have any prefixes
"""
ixlan = self.entities["ixlan"][2]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
self.assertEqual(len(netixlans), 0)
self.assertEqual(len(netixlans_deleted), 0)
- self.assertEqual(log["errors"], [u'No prefixes defined on ixlan'])
+ self.assertEqual(log["errors"], [u"No prefixes defined on ixlan"])
def test_update_from_ixf_ixp_member_list_skip_disabled_networks(self):
"""
@@ -227,7 +308,9 @@ def test_update_from_ixf_ixp_member_list_skip_disabled_networks(self):
network = self.entities["net"][0]
network.allow_ixp_update = False
network.save()
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
self.assertLog(log, "skip_disabled_networks")
self.assertEqual(len(netixlans), 0)
@@ -238,37 +321,43 @@ def test_update_from_ixf_ixp_member_list_skip_disabled_networks(self):
def test_update_from_ixf_ixp_member_list_logs(self):
ixlan = self.entities["ixlan"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
attempt_dt_1 = ixlan.ixf_import_attempt.updated
for netixlan in netixlans:
- log_entry = ixlan.ixf_import_log_set.last().entries.get(
- netixlan=netixlan)
+ log_entry = ixlan.ixf_import_log_set.last().entries.get(netixlan=netixlan)
- if netixlan.id in (self.entities["netixlan"][4].id, self.entities["netixlan"][5].id):
+ if netixlan.id in (
+ self.entities["netixlan"][4].id,
+ self.entities["netixlan"][5].id,
+ ):
# netixlan was modified
self.assertEqual(
log_entry.version_before,
- reversion.models.Version.objects.get_for_object(netixlan)[1])
+ reversion.models.Version.objects.get_for_object(netixlan)[1],
+ )
else:
# netixlan was added
- self.assertEqual(
- log_entry.version_before, None)
+ self.assertEqual(log_entry.version_before, None)
self.assertEqual(
log_entry.version_after,
- reversion.models.Version.objects.get_for_object(netixlan)[0])
+ reversion.models.Version.objects.get_for_object(netixlan)[0],
+ )
for netixlan in netixlans_deleted:
- log_entry = ixlan.ixf_import_log_set.last().entries.get(
- netixlan=netixlan)
+ log_entry = ixlan.ixf_import_log_set.last().entries.get(netixlan=netixlan)
self.assertEqual(
log_entry.version_before,
- reversion.models.Version.objects.get_for_object(netixlan)[1])
+ reversion.models.Version.objects.get_for_object(netixlan)[1],
+ )
self.assertEqual(
log_entry.version_after,
- reversion.models.Version.objects.get_for_object(netixlan)[0])
+ reversion.models.Version.objects.get_for_object(netixlan)[0],
+ )
with reversion.create_revision():
netixlans[0].speed = 10
@@ -276,7 +365,9 @@ def test_update_from_ixf_ixp_member_list_logs(self):
time.sleep(0.1)
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
ixlan.ixf_import_attempt.refresh_from_db()
attempt_dt_2 = ixlan.ixf_import_attempt.updated
@@ -286,18 +377,21 @@ def test_update_from_ixf_ixp_member_list_logs(self):
self.assertEqual(len(netixlans), 1)
for netixlan in netixlans:
- log_entry = ixlan.ixf_import_log_set.last().entries.get(
- netixlan=netixlan)
+ log_entry = ixlan.ixf_import_log_set.last().entries.get(netixlan=netixlan)
self.assertEqual(
log_entry.version_before,
- reversion.models.Version.objects.get_for_object(netixlan)[1])
+ reversion.models.Version.objects.get_for_object(netixlan)[1],
+ )
self.assertEqual(
log_entry.version_after,
- reversion.models.Version.objects.get_for_object(netixlan)[0])
+ reversion.models.Version.objects.get_for_object(netixlan)[0],
+ )
def test_rollback(self):
ixlan = self.entities["ixlan"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
for entry in ixlan.ixf_import_log_set.last().entries.all():
self.assertEqual(entry.rollback_status(), 0)
@@ -315,14 +409,21 @@ def test_rollback(self):
def test_rollback_avoid_ipaddress_conflict(self):
ixlan = self.entities["ixlan"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
self.assertEqual(len(netixlans_deleted), 2)
netixlan = netixlans_deleted[0]
other = NetworkIXLan.objects.create(
- network=netixlan.network, ixlan=netixlan.ixlan, speed=1000,
- status="ok", asn=netixlan.asn + 1, ipaddr4=netixlan.ipaddr4)
+ network=netixlan.network,
+ ixlan=netixlan.ixlan,
+ speed=1000,
+ status="ok",
+ asn=netixlan.asn + 1,
+ ipaddr4=netixlan.ipaddr4,
+ )
for entry in ixlan.ixf_import_log_set.last().entries.all():
if entry.netixlan == netixlan:
@@ -346,7 +447,9 @@ def test_export_view_ixlan(self):
# import the data
ixlan = self.entities["ixlan"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
# request the view and compare it agaisnt expected data
c = Client()
@@ -354,9 +457,11 @@ def test_export_view_ixlan(self):
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "json_members_list",
- "export.json"), "r") as fh:
+ os.path.join(
+ os.path.dirname(__file__), "data", "json_members_list", "export.json"
+ ),
+ "r",
+ ) as fh:
expected = json.load(fh)
data["timestamp"] = expected["timestamp"]
self.assertEqual(data, expected)
@@ -376,7 +481,9 @@ def test_export_view_ix(self):
# import the data
ixlan = self.entities["ixlan"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
# request the view and compare it agaisnt expected data
c = Client()
@@ -384,9 +491,11 @@ def test_export_view_ix(self):
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content)
with open(
- os.path.join(
- os.path.dirname(__file__), "data", "json_members_list",
- "export.json"), "r") as fh:
+ os.path.join(
+ os.path.dirname(__file__), "data", "json_members_list", "export.json"
+ ),
+ "r",
+ ) as fh:
other = json.load(fh)
data["timestamp"] = other["timestamp"]
self.assertEqual(data, other)
@@ -410,7 +519,13 @@ def test_command(self):
stdout = StringIO.StringIO()
stderr = StringIO.StringIO()
- r = call_command("pdb_ixf_ixp_member_import", ixlan=[ixlan.id], commit=True, stdout=stdout, stderr=stderr)
+ r = call_command(
+ "pdb_ixf_ixp_member_import",
+ ixlan=[ixlan.id],
+ commit=True,
+ stdout=stdout,
+ stderr=stderr,
+ )
self.assertEqual(stdout.getvalue().find("Fetching data for -ixlan1 from"), 0)
# importer should skip ixlans where ixf_ixp_import_enabled is
@@ -422,14 +537,21 @@ def test_command(self):
stdout = StringIO.StringIO()
stderr = StringIO.StringIO()
- r = call_command("pdb_ixf_ixp_member_import", ixlan=[ixlan.id], commit=True, stdout=stdout, stderr=stderr)
+ r = call_command(
+ "pdb_ixf_ixp_member_import",
+ ixlan=[ixlan.id],
+ commit=True,
+ stdout=stdout,
+ stderr=stderr,
+ )
self.assertEqual(stdout.getvalue().find("Fetching data for -ixlan1 from"), -1)
-
def test_postmortem(self):
ixlan = self.entities["ixlan"][0]
net = self.entities["net"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
request = RequestFactory().get("/import/net/{}/ixf/preview/".format(net.id))
request.user = self.admin_user
response = view_import_net_ixf_postmortem(request, net.id)
@@ -440,32 +562,38 @@ def test_postmortem(self):
del entry["created"]
self.assertLog(content, "postmortem_01")
-
-
def test_postmortem_limit(self):
ixlan = self.entities["ixlan"][0]
net = self.entities["net"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
- request = RequestFactory().get("/import/net/{}/ixf/postmortem/".format(net.id),{"limit":1})
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
+ request = RequestFactory().get(
+ "/import/net/{}/ixf/postmortem/".format(net.id), {"limit": 1}
+ )
request.user = self.admin_user
response = view_import_net_ixf_postmortem(request, net.id)
content = json.loads(response.content)
assert len(content["data"]) == 1
-
def test_postmortem_limit_max(self):
ixlan = self.entities["ixlan"][0]
net = self.entities["net"][0]
- r, netixlans, netixlans_deleted, log = self.ixf_importer.update(ixlan, data=self.json_data)
- request = RequestFactory().get("/import/net/{}/ixf/postmortem/".format(net.id),{"limit":1000})
+ r, netixlans, netixlans_deleted, log = self.ixf_importer.update(
+ ixlan, data=self.json_data
+ )
+ request = RequestFactory().get(
+ "/import/net/{}/ixf/postmortem/".format(net.id), {"limit": 1000}
+ )
request.user = self.admin_user
response = view_import_net_ixf_postmortem(request, net.id)
content = json.loads(response.content)
assert len(content["data"]) == 6
- assert content["non_field_errors"] == ["Postmortem length cannot exceed 250 entries"]
-
+ assert content["non_field_errors"] == [
+ "Postmortem length cannot exceed 250 entries"
+ ]
def test_import_postmortem_fail_ratelimit(self):
net = self.entities["net"][0]
@@ -478,7 +606,6 @@ def test_import_postmortem_fail_ratelimit(self):
response = view_import_net_ixf_postmortem(request, net.id)
assert response.status_code == 400
-
def test_import_postmortem_fail_permission(self):
net = self.entities["net"][0]
request = RequestFactory().get("/import/net/{}/ixf/postmortem/".format(net.id))
@@ -487,7 +614,6 @@ def test_import_postmortem_fail_permission(self):
response = view_import_net_ixf_postmortem(request, net.id)
assert response.status_code == 403
-
def test_net_preview(self):
ixlan = self.entities["ixlan"][0]
net = self.entities["net"][0]
@@ -517,7 +643,6 @@ def test_net_preview_fail_ratelimit(self):
response = view_import_net_ixf_preview(request, net.id)
assert response.status_code == 400
-
def test_net_preview_fail_permission(self):
net = self.entities["net"][0]
request = RequestFactory().get("/import/net/{}/ixf/preview/".format(net.id))
@@ -527,9 +652,6 @@ def test_net_preview_fail_permission(self):
assert response.status_code == 403
-
-
-
class JsonMembersListTestCase_V05(JsonMembersListTestCase):
version = "0.5"
@@ -548,37 +670,46 @@ class TestImportPreview(ClientCase):
def setUpTestData(cls):
super(TestImportPreview, cls).setUpTestData()
cls.org = Organization.objects.create(name="Test Org", status="ok")
- cls.ix = InternetExchange.objects.create(name="Test IX", status="ok", org=cls.org)
+ cls.ix = InternetExchange.objects.create(
+ name="Test IX", status="ok", org=cls.org
+ )
cls.ixlan = IXLan.objects.create(status="ok", ix=cls.ix)
- IXLanPrefix.objects.create(ixlan=cls.ixlan, status="ok",
- prefix="195.69.144.0/22", protocol="IPv4")
- IXLanPrefix.objects.create(ixlan=cls.ixlan, status="ok",
- prefix="2001:7f8:1::/64", protocol="IPv6")
-
- cls.net = Network.objects.create(org=cls.org, status="ok",
- asn=1000, name="net01")
- cls.net_2 = Network.objects.create(org=cls.org, status="ok",
- asn=1001, name="net02")
-
-
- cls.admin_user = User.objects.create_user("admin","admin@localhost","admin")
+ IXLanPrefix.objects.create(
+ ixlan=cls.ixlan, status="ok", prefix="195.69.144.0/22", protocol="IPv4"
+ )
+ IXLanPrefix.objects.create(
+ ixlan=cls.ixlan, status="ok", prefix="2001:7f8:1::/64", protocol="IPv6"
+ )
+
+ cls.net = Network.objects.create(
+ org=cls.org, status="ok", asn=1000, name="net01"
+ )
+ cls.net_2 = Network.objects.create(
+ org=cls.org, status="ok", asn=1001, name="net02"
+ )
+
+ cls.admin_user = User.objects.create_user("admin", "admin@localhost", "admin")
cls.org.admin_usergroup.user_set.add(cls.admin_user)
-
def test_import_preview(self):
- request = RequestFactory().get("/import/ixlan/{}/ixf/preview/".format(self.ixlan.id))
+ request = RequestFactory().get(
+ "/import/ixlan/{}/ixf/preview/".format(self.ixlan.id)
+ )
request.user = self.admin_user
response = view_import_ixlan_ixf_preview(request, self.ixlan.id)
assert response.status_code == 200
- assert json.loads(response.content)["errors"] == ["IXF import url not specified"]
-
+ assert json.loads(response.content)["errors"] == [
+ "IXF import url not specified"
+ ]
def test_import_preview_fail_ratelimit(self):
- request = RequestFactory().get("/import/ixlan/{}/ixf/preview/".format(self.ixlan.id))
+ request = RequestFactory().get(
+ "/import/ixlan/{}/ixf/preview/".format(self.ixlan.id)
+ )
request.user = self.admin_user
response = view_import_ixlan_ixf_preview(request, self.ixlan.id)
@@ -587,26 +718,29 @@ def test_import_preview_fail_ratelimit(self):
response = view_import_ixlan_ixf_preview(request, self.ixlan.id)
assert response.status_code == 400
-
def test_import_preview_fail_permission(self):
- request = RequestFactory().get("/import/ixlan/{}/ixf/preview/".format(self.ixlan.id))
+ request = RequestFactory().get(
+ "/import/ixlan/{}/ixf/preview/".format(self.ixlan.id)
+ )
request.user = self.guest_user
response = view_import_ixlan_ixf_preview(request, self.ixlan.id)
assert response.status_code == 403
-
def test_import_net_preview(self):
- request = RequestFactory().get("/import/net/{}/ixf/preview/".format(self.net.id))
+ request = RequestFactory().get(
+ "/import/net/{}/ixf/preview/".format(self.net.id)
+ )
request.user = self.admin_user
response = view_import_net_ixf_preview(request, self.net.id)
assert response.status_code == 200
-
def test_import_net_preview_fail_ratelimit(self):
- request = RequestFactory().get("/import/net/{}/ixf/preview/".format(self.net.id))
+ request = RequestFactory().get(
+ "/import/net/{}/ixf/preview/".format(self.net.id)
+ )
request.user = self.admin_user
response = view_import_net_ixf_preview(request, self.net.id)
@@ -615,15 +749,15 @@ def test_import_net_preview_fail_ratelimit(self):
response = view_import_net_ixf_preview(request, self.net.id)
assert response.status_code == 400
-
def test_import_net_preview_fail_permission(self):
- request = RequestFactory().get("/import/net/{}/ixf/preview/".format(self.net.id))
+ request = RequestFactory().get(
+ "/import/net/{}/ixf/preview/".format(self.net.id)
+ )
request.user = self.guest_user
response = view_import_net_ixf_preview(request, self.net.id)
assert response.status_code == 403
-
def test_netixlan_diff(self):
netix1 = NetworkIXLan.objects.create(
network=self.net,
@@ -633,7 +767,8 @@ def test_netixlan_diff(self):
ipaddr6="2001:7f8:1::a500:2906:1",
asn=self.net.asn,
speed=1000,
- is_rs_peer=True)
+ is_rs_peer=True,
+ )
netix2 = NetworkIXLan(
network=self.net_2,
@@ -642,26 +777,22 @@ def test_netixlan_diff(self):
ipaddr6="2001:7f8:1::a500:2906:2",
asn=self.net_2.asn,
speed=10000,
- is_rs_peer=False)
-
- result = self.ixlan.add_netixlan(netix2, save=False,
- save_others=False)
+ is_rs_peer=False,
+ )
- self.assertEqual(sorted(result["changed"]), ['asn', 'ipaddr6',
- 'is_rs_peer', 'network_id', 'speed'])
+ result = self.ixlan.add_netixlan(netix2, save=False, save_others=False)
+ self.assertEqual(
+ sorted(result["changed"]),
+ ["asn", "ipaddr6", "is_rs_peer", "network_id", "speed"],
+ )
netix2.ipaddr4 = "195.69.146.251"
netix2.ipaddr6 = netix1.ipaddr6
- result = self.ixlan.add_netixlan(netix2, save=False,
- save_others=False)
-
- self.assertEqual(sorted(result["changed"]), ['asn', 'ipaddr4',
- 'is_rs_peer', 'network_id', 'speed'])
-
-
-
-
-
+ result = self.ixlan.add_netixlan(netix2, save=False, save_others=False)
+ self.assertEqual(
+ sorted(result["changed"]),
+ ["asn", "ipaddr4", "is_rs_peer", "network_id", "speed"],
+ )
diff --git a/tests/test_locale_files.py b/tests/test_locale_files.py
index 8f5da146..d2c965a7 100644
--- a/tests/test_locale_files.py
+++ b/tests/test_locale_files.py
@@ -9,7 +9,8 @@
class LocaleFilesTest(TestCase):
def load_messages(self, language, filename="django.po"):
path = os.path.join(
- os.path.dirname(__file__), "..", "locale", language, "LC_MESSAGES")
+ os.path.dirname(__file__), "..", "locale", language, "LC_MESSAGES"
+ )
with open(os.path.join(path, filename), "r") as fh:
content = fh.read()
message_id = re.findall(r"\nmsgid (.+)\n", content)
@@ -30,10 +31,13 @@ def _test_pt(self):
Test portuguese locale files
"""
self.assert_variables(
- self.load_messages("en_US"), self.load_messages("pt"), "PT")
+ self.load_messages("en_US"), self.load_messages("pt"), "PT"
+ )
self.assert_variables(
self.load_messages("en_US", filename="djangojs.po"),
- self.load_messages("pt", filename="djangojs.po"), "PT")
+ self.load_messages("pt", filename="djangojs.po"),
+ "PT",
+ )
def assert_variables(self, en_messages, other_messages, language):
"""
@@ -45,25 +49,26 @@ def assert_variables(self, en_messages, other_messages, language):
# %(name)s and %s type variables
variables_a = sorted(re.findall("%\([^\(]+\)s|%s", msgid))
- variables_b = sorted(
- re.findall("%\([^\(]+\)s|%s", other_messages[msgid]))
+ variables_b = sorted(re.findall("%\([^\(]+\)s|%s", other_messages[msgid]))
if variables_a != variables_b:
errors += 1
print "{} Locale variable error at msgid {} -> {}".format(
- language, msgid, other_messages[msgid])
+ language, msgid, other_messages[msgid]
+ )
# {name} and {} type variables
- variables_a = sorted([
- fn for _, fn, _, _ in Formatter().parse(msgid)
+ variables_a = sorted(
+ [fn for _, fn, _, _ in Formatter().parse(msgid) if fn is not None]
+ )
+ variables_b = [
+ fn
+ for _, fn, _, _ in Formatter().parse(other_messages[msgid])
if fn is not None
- ])
- variables_b = ([
- fn for _, fn, _, _ in Formatter().parse(other_messages[msgid])
- if fn is not None
- ])
+ ]
if variables_a != variables_b:
errors += 1
print "{} Locale variable error at msgid {} -> {}".format(
- language, msgid, other_messages[msgid])
+ language, msgid, other_messages[msgid]
+ )
assert errors == 0
diff --git a/tests/test_maintenance.py b/tests/test_maintenance.py
index 7495b69f..43a78263 100644
--- a/tests/test_maintenance.py
+++ b/tests/test_maintenance.py
@@ -11,21 +11,20 @@
from util import ClientCase
-class TestMaintenanceMode(ClientCase):
+class TestMaintenanceMode(ClientCase):
@classmethod
def setUpTestData(cls):
super(TestMaintenanceMode, cls).setUpTestData()
- cls.superuser = User.objects.create_user("su","su@localhost","su",is_superuser=True)
- cls.org = REFTAG_MAP["org"].objects.create(name="Test Org",
- status="ok")
-
+ cls.superuser = User.objects.create_user(
+ "su", "su@localhost", "su", is_superuser=True
+ )
+ cls.org = REFTAG_MAP["org"].objects.create(name="Test Org", status="ok")
@pytest.fixture(autouse=True)
def init_lockfile(self, tmpdir):
settings.MAINTENANCE_MODE_LOCKFILE = str(tmpdir.join("maintenance.lock"))
-
def test_signup(self):
"""
user signup should be blocked during maintenance
@@ -40,7 +39,6 @@ def test_signup(self):
maintenance.off()
-
def test_api(self):
"""
test that maintenance mode on blocks all write ops to the rest api
@@ -61,27 +59,22 @@ def test_api(self):
assert r.status_code == 200
# POST should be blocked
- r = self.client.post("/api/net", {
- "org_id": 1,
- "name": "Test net",
- "asn": 9000000
- }, format="json")
+ r = self.client.post(
+ "/api/net", {"org_id": 1, "name": "Test net", "asn": 9000000}, format="json"
+ )
content = json.loads(r.content)
assert r.status_code == 503
assert content["meta"]["error"].find(err_str) > -1
net = {"id": 1}
# PUT should be blocked
- r = self.client.put("/api/net/{}".format(net["id"]), net,
- format="json")
+ r = self.client.put("/api/net/{}".format(net["id"]), net, format="json")
content = json.loads(r.content)
assert r.status_code == 503
assert content["meta"]["error"].find(err_str) > -1
-
# DELETE should be blocked
- r = self.client.delete("/api/net/{}".format(net["id"]), {},
- format="json")
+ r = self.client.delete("/api/net/{}".format(net["id"]), {}, format="json")
content = json.loads(r.content)
assert r.status_code == 503
assert content["meta"]["error"].find(err_str) > -1
diff --git a/tests/test_orgadmin.py b/tests/test_orgadmin.py
index 11302a43..0e65f457 100644
--- a/tests/test_orgadmin.py
+++ b/tests/test_orgadmin.py
@@ -27,19 +27,26 @@ def setUpTestData(cls):
# create test users
for name in [
- "org_admin", "user_a", "user_b", "user_c", "user_d", "user_e",
- "user_f"
+ "org_admin",
+ "user_a",
+ "user_b",
+ "user_c",
+ "user_d",
+ "user_e",
+ "user_f",
]:
- setattr(cls, name,
- models.User.objects.create_user(
- name, "%s@localhost" % name, name))
+ setattr(
+ cls,
+ name,
+ models.User.objects.create_user(name, "%s@localhost" % name, name),
+ )
getattr(cls, name).set_password(name)
# create test org
- cls.org = models.Organization.objects.create(name="Test org",
- status="ok")
+ cls.org = models.Organization.objects.create(name="Test org", status="ok")
cls.org_other = models.Organization.objects.create(
- name="Test org other", status="ok")
+ name="Test org other", status="ok"
+ )
# create test entities
for tag in cls.entities:
@@ -64,23 +71,26 @@ def test_users(self):
"""
# test #1 - return a json response with the user we added to the org's member
# usergroup
- request = self.factory.get(
- "/org-admin/users?org_id=%d" % (self.org.id))
+ request = self.factory.get("/org-admin/users?org_id=%d" % (self.org.id))
request.user = self.org_admin
resp = json.loads(org_admin.users(request).content)
self.assertEqual(resp["status"], "ok")
- self.assertEqual(resp["users"], [{
- "id": self.user_a.id,
- "name": "%s <%s, %s>" % (self.user_a.full_name, self.user_a.email,
- self.user_a.username)
- }])
+ self.assertEqual(
+ resp["users"],
+ [
+ {
+ "id": self.user_a.id,
+ "name": "%s <%s, %s>"
+ % (self.user_a.full_name, self.user_a.email, self.user_a.username),
+ }
+ ],
+ )
# test #2 - return 403 response when trying to access the org where org_admin
# is not administrator
- request = self.factory.get(
- "/org-admin/users?org_id=%d" % (self.org_other.id))
+ request = self.factory.get("/org-admin/users?org_id=%d" % (self.org_other.id))
request.user = self.org_admin
resp = org_admin.users(request)
@@ -95,19 +105,14 @@ def test_load_all_user_permissions(self):
uid = self.user_a.id
perms = {
uid: {
- "perms": {
- "net.%d" % self.net.id: 0x01,
- "fac": 0x03
- },
+ "perms": {"net.%d" % self.net.id: 0x01, "fac": 0x03},
"id": self.user_a.id,
- "name": "%s <%s> %s" %
- (self.user_a.full_name, self.user_a.email,
- self.user_a.username)
+ "name": "%s <%s> %s"
+ % (self.user_a.full_name, self.user_a.email, self.user_a.username),
}
}
- org_admin.save_user_permissions(self.org, self.user_a,
- perms[uid]["perms"])
+ org_admin.save_user_permissions(self.org, self.user_a, perms[uid]["perms"])
perms_all = org_admin.load_all_user_permissions(self.org)
@@ -121,11 +126,12 @@ def test_user_permissions_update_remove(self):
# Test #1 - test updating a user a's permission to the org
url = "/org-admin/user_permissions/update?org_id=%d&user_id=%d" % (
- self.org.id, self.user_a.id)
- request = self.factory.post(url, data={
- "entity": "net.%d" % self.net.id,
- "perms": 0x03
- })
+ self.org.id,
+ self.user_a.id,
+ )
+ request = self.factory.post(
+ url, data={"entity": "net.%d" % self.net.id, "perms": 0x03}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
@@ -140,11 +146,12 @@ def test_user_permissions_update_remove(self):
# the org
url = "/org-admin/user_permissions/update?org_id=%d&user_id=%d" % (
- self.org.id, self.user_b.id)
- request = self.factory.post(url, data={
- "entity": "net.%d" % self.net.id,
- "perms": 0x03
- })
+ self.org.id,
+ self.user_b.id,
+ )
+ request = self.factory.post(
+ url, data={"entity": "net.%d" % self.net.id, "perms": 0x03}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.user_permission_update(request)
@@ -155,11 +162,12 @@ def test_user_permissions_update_remove(self):
# the admin of his org
url = "/org-admin/user_permissions/update?org_id=%d&user_id=%d" % (
- self.org_other.id, self.user_b.id)
- request = self.factory.post(url, data={
- "entity": "net.%d" % self.net.id,
- "perms": 0x03
- })
+ self.org_other.id,
+ self.user_b.id,
+ )
+ request = self.factory.post(
+ url, data={"entity": "net.%d" % self.net.id, "perms": 0x03}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.user_permission_update(request)
@@ -168,10 +176,10 @@ def test_user_permissions_update_remove(self):
# Test #4 - remove the permissions we just added
url = "/org-admin/user_permissions/remove?org_id=%d&user_id=%d" % (
- self.org.id, self.user_a.id)
- request = self.factory.post(url, data={
- "entity": "net.%d" % self.net.id
- })
+ self.org.id,
+ self.user_a.id,
+ )
+ request = self.factory.post(url, data={"entity": "net.%d" % self.net.id})
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
@@ -185,10 +193,10 @@ def test_user_permissions_update_remove(self):
# Test #5 - should not be allowed remove user b's permissions as he
# is not a member of the org
url = "/org-admin/user_permissions/remove?org_id=%d&user_id=%d" % (
- self.org.id, self.user_b.id)
- request = self.factory.post(url, data={
- "entity": "net.%d" % self.net.id
- })
+ self.org.id,
+ self.user_b.id,
+ )
+ request = self.factory.post(url, data={"entity": "net.%d" % self.net.id})
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
@@ -199,10 +207,10 @@ def test_user_permissions_update_remove(self):
# Test #6 - should not be allowed to remove user b's permissions as we
# are not the admin of his org
url = "/org-admin/user_permissions/remove?org_id=%d&user_id=%d" % (
- self.org_other.id, self.user_b.id)
- request = self.factory.post(url, data={
- "entity": "net.%d" % self.net.id
- })
+ self.org_other.id,
+ self.user_b.id,
+ )
+ request = self.factory.post(url, data={"entity": "net.%d" % self.net.id})
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
@@ -219,7 +227,8 @@ def test_user_permissions(self):
# Test #1 - test user a's permission to the org
request = self.factory.get(
- "/org-admin/user_permissions?org_id=%d" % (self.org.id))
+ "/org-admin/user_permissions?org_id=%d" % (self.org.id)
+ )
request.user = self.org_admin
uid = str(self.user_a.id)
@@ -244,7 +253,8 @@ def test_user_permissions(self):
# Test #5 - no permissions to org
request = self.factory.get(
- "/org-admin/user_permissions?org_id=%d" % (self.org_other.id))
+ "/org-admin/user_permissions?org_id=%d" % (self.org_other.id)
+ )
request.user = self.org_admin
resp = org_admin.user_permissions(request)
@@ -258,9 +268,7 @@ def test_org_admin_tools(self):
"""
for tag in ["fac", "net", "ix"]:
- org_admin.save_user_permissions(self.org, self.user_a, {
- tag: PERM_CREATE
- })
+ org_admin.save_user_permissions(self.org, self.user_a, {tag: PERM_CREATE})
c = Client()
c.login(username=self.user_a.username, password="user_a")
resp = c.get("/org/%d" % self.org.id, follow=True)
@@ -286,10 +294,10 @@ def test_manage_user_delete(self):
self.assertEqual(self.user_e.is_org_admin(self.org), True)
# test #1 - remove user f (member) from org
- request = self.factory.post("/org-admin/manage_user/delete", {
- "org_id": self.org.id,
- "user_id": self.user_f.id
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/delete",
+ {"org_id": self.org.id, "user_id": self.user_f.id},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.manage_user_delete(request)
@@ -300,10 +308,10 @@ def test_manage_user_delete(self):
self.assertEqual(self.user_f.is_org_admin(self.org), False)
# test #2 - remove user e (admin) from org
- request = self.factory.post("/org-admin/manage_user/delete", {
- "org_id": self.org.id,
- "user_id": self.user_e.id
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/delete",
+ {"org_id": self.org.id, "user_id": self.user_e.id},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.manage_user_delete(request)
@@ -314,20 +322,20 @@ def test_manage_user_delete(self):
self.assertEqual(self.user_e.is_org_admin(self.org), False)
# test #3 - fail on user that is not currently in org
- request = self.factory.post("/org-admin/manage_user/delete", {
- "org_id": self.org.id,
- "user_id": self.user_d.id
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/delete",
+ {"org_id": self.org.id, "user_id": self.user_d.id},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.manage_user_delete(request)
self.assertEqual(resp.status_code, 403)
# test #3 - fail on org that you are not an admin of
- request = self.factory.post("/org-admin/manage_user/delete", {
- "org_id": self.org_other.id,
- "user_id": self.user_d.id
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/delete",
+ {"org_id": self.org_other.id, "user_id": self.user_d.id},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.manage_user_delete(request)
@@ -343,11 +351,10 @@ def test_manage_user_update(self):
self.assertEqual(self.user_a.is_org_admin(self.org), False)
# test #1 - move user a to admin group
- request = self.factory.post("/org-admin/manage_user/update", {
- "org_id": self.org.id,
- "user_id": self.user_a.id,
- "group": "admin"
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/update",
+ {"org_id": self.org.id, "user_id": self.user_a.id, "group": "admin"},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
@@ -359,11 +366,10 @@ def test_manage_user_update(self):
self.assertEqual(self.user_a.is_org_admin(self.org), True)
# test #2 move back to member group
- request = self.factory.post("/org-admin/manage_user/update", {
- "org_id": self.org.id,
- "user_id": self.user_a.id,
- "group": "member"
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/update",
+ {"org_id": self.org.id, "user_id": self.user_a.id, "group": "member"},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
@@ -375,11 +381,10 @@ def test_manage_user_update(self):
self.assertEqual(self.user_a.is_org_admin(self.org), False)
# test #3 - fail on user that is not currently in org
- request = self.factory.post("/org-admin/manage_user/update", {
- "org_id": self.org.id,
- "user_id": self.user_d.id,
- "group": "member"
- })
+ request = self.factory.post(
+ "/org-admin/manage_user/update",
+ {"org_id": self.org.id, "user_id": self.user_d.id, "group": "member"},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.manage_user_update(request)
@@ -387,11 +392,9 @@ def test_manage_user_update(self):
# test #3 - fail on org that you are not an admin of
request = self.factory.post(
- "/org-admin/manage_user/update", {
- "org_id": self.org_other.id,
- "user_id": self.user_d.id,
- "group": "admin"
- })
+ "/org-admin/manage_user/update",
+ {"org_id": self.org_other.id, "user_id": self.user_d.id, "group": "admin"},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.manage_user_update(request)
@@ -404,8 +407,7 @@ def test_permissions(self):
# Test #1 - retrieve permissioning ids for org
- request = self.factory.get(
- "/org-admin/permissions?org_id=%d" % self.org.id)
+ request = self.factory.get("/org-admin/permissions?org_id=%d" % self.org.id)
request.user = self.org_admin
resp = json.loads(org_admin.permissions(request).content)
@@ -420,7 +422,8 @@ def test_permissions(self):
# Test #2 - cannot retrieve ids for other org as we are not admin
request = self.factory.get(
- "/org-admin/permissions?org_id=%d" % self.org_other.id)
+ "/org-admin/permissions?org_id=%d" % self.org_other.id
+ )
request.user = self.org_admin
resp = org_admin.permissions(request)
self.assertEqual(resp.status_code, 403)
@@ -451,7 +454,7 @@ def test_extract_permission_id(self):
source = {
self.net.nsp_namespace: 0x01,
self.ix.nsp_namespace: 0x01,
- self.fac.nsp_namespace: 0x01
+ self.fac.nsp_namespace: 0x01,
}
# extract ids
@@ -460,11 +463,14 @@ def test_extract_permission_id(self):
org_admin.extract_permission_id(source, dest, self.ix, self.org)
org_admin.extract_permission_id(source, dest, self.fac, self.org)
- self.assertEqual({
- "net.%d" % self.net.id: 0x01,
- "ix.%d" % self.ix.id: 0x01,
- "fac.%d" % self.fac.id: 0x01
- }, dest)
+ self.assertEqual(
+ {
+ "net.%d" % self.net.id: 0x01,
+ "ix.%d" % self.ix.id: 0x01,
+ "fac.%d" % self.fac.id: 0x01,
+ },
+ dest,
+ )
# test with just the models
@@ -480,10 +486,8 @@ def test_extract_permission_id(self):
# extract ids
org_admin.extract_permission_id(source, dest, models.Network, self.org)
- org_admin.extract_permission_id(source, dest, models.InternetExchange,
- self.org)
- org_admin.extract_permission_id(source, dest, models.Facility,
- self.org)
+ org_admin.extract_permission_id(source, dest, models.InternetExchange, self.org)
+ org_admin.extract_permission_id(source, dest, models.Facility, self.org)
self.assertEqual({"net": 0x01, "fac": 0x03, "ix": 0x01}, dest)
@@ -495,45 +499,48 @@ def test_uoar_approve(self):
# create a uoar for user c
uoar = models.UserOrgAffiliationRequest.objects.create(
- user=self.user_c, asn=1, status="pending")
+ user=self.user_c, asn=1, status="pending"
+ )
# test that org id was properly derived from network asn
self.assertEqual(uoar.org.id, self.org.id)
# test approval
request = self.factory.post(
- "/org-admin/uoar/approve?org_id=%d" % self.org.id, data={
- "id": uoar.id
- })
+ "/org-admin/uoar/approve?org_id=%d" % self.org.id, data={"id": uoar.id}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = json.loads(org_admin.uoar_approve(request).content)
- self.assertEqual({
- "status": "ok",
- "full_name": self.user_c.full_name,
- "id": self.user_c.id,
- "email": self.user_c.email
- }, resp)
+ self.assertEqual(
+ {
+ "status": "ok",
+ "full_name": self.user_c.full_name,
+ "id": self.user_c.id,
+ "email": self.user_c.email,
+ },
+ resp,
+ )
# check that user is now a member of the org
self.assertEqual(
- self.org.usergroup.user_set.filter(id=self.user_c.id).exists(),
- True)
+ self.org.usergroup.user_set.filter(id=self.user_c.id).exists(), True
+ )
# check that the UOAR is gone
self.assertEqual(
- models.UserOrgAffiliationRequest.objects.filter(
- id=uoar.id).exists(), False)
+ models.UserOrgAffiliationRequest.objects.filter(id=uoar.id).exists(), False
+ )
# test: we shouldnt be allowed to approve uoar's for the org we are not
# admins of
request = self.factory.post(
- "/org-admin/uoar/approve?org_id=%d" % self.org_other.id, data={
- "id": uoar.id
- })
+ "/org-admin/uoar/approve?org_id=%d" % self.org_other.id,
+ data={"id": uoar.id},
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.uoar_approve(request)
@@ -544,12 +551,12 @@ def test_uoar_approve(self):
# be allowed
uoar_b = models.UserOrgAffiliationRequest.objects.create(
- user=self.user_d, asn=22, status="pending")
+ user=self.user_d, asn=22, status="pending"
+ )
request = self.factory.post(
- "/org-admin/uoar/approve?org_id=%d" % self.org.id, data={
- "id": uoar_b.id
- })
+ "/org-admin/uoar/approve?org_id=%d" % self.org.id, data={"id": uoar_b.id}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.uoar_approve(request)
@@ -566,29 +573,27 @@ def test_uoar_deny(self):
# create a uoar for user d
uoar = models.UserOrgAffiliationRequest.objects.create(
- user=self.user_d, asn=1, status="pending")
+ user=self.user_d, asn=1, status="pending"
+ )
# test that org id was properly derived from network asn
self.assertEqual(uoar.org.id, self.org.id)
# test deny
request = self.factory.post(
- "/org-admin/uoar/deny?org_id=%d" % self.org.id, data={
- "id": uoar.id
- })
+ "/org-admin/uoar/deny?org_id=%d" % self.org.id, data={"id": uoar.id}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = json.loads(org_admin.uoar_deny(request).content)
- self.assertEqual({
- "status": "ok",
- }, resp)
+        self.assertEqual({"status": "ok"}, resp)
# check that user is not a member of the org
self.assertEqual(
- self.org.usergroup.user_set.filter(id=self.user_d.id).exists(),
- False)
+ self.org.usergroup.user_set.filter(id=self.user_d.id).exists(), False
+ )
# check that the UOAR is there, but status is denyed
uoar = models.UserOrgAffiliationRequest.objects.get(id=uoar.id)
@@ -598,9 +603,8 @@ def test_uoar_deny(self):
# admins of
request = self.factory.post(
- "/org-admin/uoar/deny?org_id=%d" % self.org_other.id, data={
- "id": uoar.id
- })
+ "/org-admin/uoar/deny?org_id=%d" % self.org_other.id, data={"id": uoar.id}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.uoar_approve(request)
@@ -611,12 +615,12 @@ def test_uoar_deny(self):
# be allowed
uoar_b = models.UserOrgAffiliationRequest.objects.create(
- user=self.user_d, asn=22, status="pending")
+ user=self.user_d, asn=22, status="pending"
+ )
request = self.factory.post(
- "/org-admin/uoar/deny?org_id=%d" % self.org.id, data={
- "id": uoar_b.id
- })
+ "/org-admin/uoar/deny?org_id=%d" % self.org.id, data={"id": uoar_b.id}
+ )
request._dont_enforce_csrf_checks = True
request.user = self.org_admin
resp = org_admin.uoar_deny(request)
diff --git a/tests/test_partners.py b/tests/test_partners.py
index 0d93d4a7..c869937a 100644
--- a/tests/test_partners.py
+++ b/tests/test_partners.py
@@ -24,27 +24,39 @@ def setUpTestData(cls):
user_group = Group.objects.create(name="user")
cls.guest_user = models.User.objects.create_user(
- "guest", "guest@localhost", "guest")
+ "guest", "guest@localhost", "guest"
+ )
cls.guest_user.set_password("guest")
guest_group.user_set.add(cls.guest_user)
# create organizations
- cls.organizations = dict((k,
- models.Organization.objects.create(
- name="Partner Org %s" % k, status="ok"))
- for k in ["a", "b", "c", "d"])
+ cls.organizations = dict(
+ (
+ k,
+ models.Organization.objects.create(
+ name="Partner Org %s" % k, status="ok"
+ ),
+ )
+ for k in ["a", "b", "c", "d"]
+ )
# create partnerships
cls.partnerships = {
"a": models.Partnership.objects.create(
- org=cls.organizations.get("a"), logo="fake.png",
- url="org-a.com", level=1),
+ org=cls.organizations.get("a"),
+ logo="fake.png",
+ url="org-a.com",
+ level=1,
+ ),
"b": models.Partnership.objects.create(
- org=cls.organizations.get("b"), logo="fake.png", level=1),
+ org=cls.organizations.get("b"), logo="fake.png", level=1
+ ),
"c": models.Partnership.objects.create(
- org=cls.organizations.get("c"), logo="fake.png", level=2),
+ org=cls.organizations.get("c"), logo="fake.png", level=2
+ ),
"d": models.Partnership.objects.create(
- org=cls.organizations.get("d"), level=1)
+ org=cls.organizations.get("d"), level=1
+ ),
}
def setUp(self):
@@ -59,20 +71,20 @@ def test_view(self):
resp = c.get("/partners", follow=True)
self.assertEqual(resp.status_code, 200)
- #make sure org a,b and c exist in the partners page
+ # make sure org a,b and c exist in the partners page
self.assertGreater(resp.content.find(self.organizations["a"].name), -1)
self.assertGreater(resp.content.find(self.organizations["b"].name), -1)
self.assertGreater(resp.content.find(self.organizations["c"].name), -1)
- #make sure org d does not exist in the partners page
+ # make sure org d does not exist in the partners page
self.assertEqual(resp.content.find(self.organizations["d"].name), -1)
- #make sure partnership a url exists in the partners page
+ # make sure partnership a url exists in the partners page
self.assertGreater(resp.content.find(self.partnerships["a"].url), -1)
- #makre sure order is randomized with each view
+        # make sure order is randomized with each view
i = 0
- rgx = re.compile("fake.png\" alt=\"([^\"]+)\"")
+ rgx = re.compile('fake.png" alt="([^"]+)"')
a = re.findall(rgx, resp.content)
while i < 100:
resp = c.get("/partners", follow=True)
diff --git a/tests/test_search.py b/tests/test_search.py
index b7d5dba9..bc93077a 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -40,24 +40,31 @@ def setUpTestData(cls):
if model.handleref.tag == "net":
kwargs = {"asn": 1}
cls.instances[model.handleref.tag] = model.objects.create(
- status="ok", org=cls.org, name="Test %s" % model.handleref.tag,
- **kwargs)
+ status="ok", org=cls.org, name="Test %s" % model.handleref.tag, **kwargs
+ )
if model.handleref.tag == "net":
kwargs = {"asn": 2}
cls.instances_accented[model.handleref.tag] = model.objects.create(
- status="ok", org=cls.org,
- name=u"ãccented {}".format(model.handleref.tag), **kwargs)
+ status="ok",
+ org=cls.org,
+ name=u"ãccented {}".format(model.handleref.tag),
+ **kwargs
+ )
# we also need to test that sponsor ship status comes through
# accordingly
- cls.org_w_sponsorship = models.Organization.objects.create(name="Sponsor org", status="ok")
+ cls.org_w_sponsorship = models.Organization.objects.create(
+ name="Sponsor org", status="ok"
+ )
cls.sponsorship = models.Sponsorship.objects.create(
start_date=datetime.datetime.now() - datetime.timedelta(days=1),
end_date=datetime.datetime.now() + datetime.timedelta(days=1),
- level=1);
- models.SponsorshipOrganization.objects.create(org=cls.org_w_sponsorship,
- sponsorship=cls.sponsorship)
+ level=1,
+ )
+ models.SponsorshipOrganization.objects.create(
+ org=cls.org_w_sponsorship, sponsorship=cls.sponsorship
+ )
for model in search.searchable_models:
if model.handleref.tag == "net":
@@ -65,10 +72,11 @@ def setUpTestData(cls):
else:
kwargs = {}
cls.instances_sponsored[model.handleref.tag] = model.objects.create(
- status="ok", org=cls.org_w_sponsorship,
+ status="ok",
+ org=cls.org_w_sponsorship,
name="Sponsor %s" % model.handleref.tag,
- **kwargs)
-
+ **kwargs
+ )
def test_search(self):
"""
@@ -99,15 +107,14 @@ def test_sponsor_badges(self):
"""
factory = RequestFactory()
- request = factory.get("/search",{"q":"Sponsor"})
+ request = factory.get("/search", {"q": "Sponsor"})
response = views.request_search(request)
- m = re.findall(re.escape(
- '