diff --git a/addons/base/generic_views.py b/addons/base/generic_views.py index 3ca68c3b68d0..e4c479378063 100644 --- a/addons/base/generic_views.py +++ b/addons/base/generic_views.py @@ -7,7 +7,7 @@ from framework.exceptions import HTTPError, PermissionsError from framework.auth.decorators import must_be_logged_in -from website.oauth.models import ExternalAccount +from osf.models import ExternalAccount from website.util import permissions from website.project.decorators import ( diff --git a/addons/base/views.py b/addons/base/views.py index d18736151204..bf114badb998 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -31,12 +31,12 @@ from website import settings from addons.base import exceptions from addons.base import signals as file_signals -from osf.models import FileNode, StoredFileNode, TrashedFileNode -from website.models import Node, NodeLog, User +from osf.models import (FileNode, StoredFileNode, TrashedFileNode, + OSFUser as User, AbstractNode as Node, + NodeLog, DraftRegistration, MetaSchema) from website.profile.utils import get_gravatar from website.project import decorators from website.project.decorators import must_be_contributor_or_public, must_be_valid_project -from website.project.model import DraftRegistration, MetaSchema from website.project.utils import serialize_node from website.settings import MFR_SERVER_URL from website.util import rubeus diff --git a/addons/dataverse/views.py b/addons/dataverse/views.py index 8a03fc3b1ae4..d151c0acdcc3 100644 --- a/addons/dataverse/views.py +++ b/addons/dataverse/views.py @@ -16,7 +16,7 @@ from addons.dataverse.settings import DEFAULT_HOSTS from addons.dataverse.serializer import DataverseSerializer from dataverse.exceptions import VersionJsonNotFoundError, OperationFailedError -from website.oauth.models import ExternalAccount +from osf.models import ExternalAccount from website.project.decorators import ( must_have_addon, must_be_addon_authorizer, must_have_permission, 
must_not_be_registration, diff --git a/addons/github/views.py b/addons/github/views.py index 0a3b7da6fd11..14646eb90769 100644 --- a/addons/github/views.py +++ b/addons/github/views.py @@ -15,7 +15,7 @@ from addons.github.serializer import GitHubSerializer from addons.github.utils import verify_hook_signature, MESSAGES -from website.models import NodeLog +from osf.models import NodeLog from website.project.decorators import ( must_have_addon, must_be_addon_authorizer, must_have_permission, must_not_be_registration, diff --git a/addons/osfstorage/decorators.py b/addons/osfstorage/decorators.py index c6e989efe3b8..dd0b9488ff05 100644 --- a/addons/osfstorage/decorators.py +++ b/addons/osfstorage/decorators.py @@ -7,8 +7,8 @@ from framework.exceptions import HTTPError +from addons.osfstorage.models import OsfStorageFileNode, OsfStorageFolder from osf.models import OSFUser as User, AbstractNode as Node -from website.files import models from website.files import exceptions from website.project.decorators import ( must_not_be_registration, must_have_addon, @@ -44,7 +44,7 @@ def wrapped(*args, **kwargs): if 'fid' not in kwargs and default_root: file_node = kwargs['node_addon'].get_root() else: - file_node = models.OsfStorageFileNode.get(kwargs.get('fid'), node) + file_node = OsfStorageFileNode.get(kwargs.get('fid'), node) if must_be and file_node.kind != must_be: raise HTTPError(httplib.BAD_REQUEST, data={ @@ -71,8 +71,8 @@ def wrapped(payload, *args, **kwargs): try: user = User.load(payload['user']) dest_node = Node.load(payload['destination']['node']) - source = models.OsfStorageFileNode.get(payload['source'], kwargs['node']) - dest_parent = models.OsfStorageFolder.get(payload['destination']['parent'], dest_node) + source = OsfStorageFileNode.get(payload['source'], kwargs['node']) + dest_parent = OsfStorageFolder.get(payload['destination']['parent'], dest_node) kwargs.update({ 'user': user, diff --git a/addons/osfstorage/tests/test_utils.py 
b/addons/osfstorage/tests/test_utils.py index 398845471980..563a93151dda 100644 --- a/addons/osfstorage/tests/test_utils.py +++ b/addons/osfstorage/tests/test_utils.py @@ -7,7 +7,7 @@ from framework import sessions from framework.flask import request -from website.models import Session +from osf.models import Session from addons.osfstorage.tests import factories from addons.osfstorage import utils diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index 3c74ec7b526a..33460436eb77 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -18,8 +18,8 @@ from framework.auth import signing from website.util import rubeus -from website.models import Tag -from website.files import models +from osf.models import Tag +from osf.models import files as models from addons.osfstorage.apps import osf_storage_root from addons.osfstorage import utils from addons.base.views import make_auth @@ -572,7 +572,7 @@ def test_delete(self): fid = file._id del file # models.StoredFileNode._clear_object_cache() - assert_is(models.OsfStorageFileNode.load(fid), None) + assert_is(OsfStorageFileNode.load(fid), None) assert_true(models.TrashedFileNode.load(fid)) def test_delete_deleted(self): diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index bcfc543f8eca..3b4b88723ece 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -17,17 +17,17 @@ from framework.auth.decorators import must_be_signed from osf.exceptions import InvalidTagError, TagNotFoundError -from osf.models import OSFUser +from osf.models import FileVersion, OSFUser from website.project.decorators import ( must_not_be_registration, must_have_addon, must_have_permission ) from website.project.model import has_anonymous_link -from website.files import models from website.files import exceptions from addons.osfstorage import utils from addons.osfstorage import decorators from addons.osfstorage import settings as 
osf_storage_settings +from addons.osfstorage.models import OsfStorageFolder logger = logging.getLogger(__name__) @@ -51,7 +51,7 @@ def osfstorage_update_metadata(node_addon, payload, **kwargs): except KeyError: raise HTTPError(httplib.BAD_REQUEST) - version = models.FileVersion.load(version_id) + version = FileVersion.load(version_id) if version is None: raise HTTPError(httplib.NOT_FOUND) @@ -108,7 +108,7 @@ def osfstorage_move_hook(source, destination, name=None, **kwargs): @decorators.autoload_filenode(default_root=True) def osfstorage_get_lineage(file_node, node_addon, **kwargs): #TODO Profile - list(models.OsfStorageFolder.find(Q('node', 'eq', node_addon.owner))) + list(OsfStorageFolder.find(Q('node', 'eq', node_addon.owner))) lineage = [] diff --git a/addons/owncloud/models.py b/addons/owncloud/models.py index 14c3f053ead6..84209e1a783c 100644 --- a/addons/owncloud/models.py +++ b/addons/owncloud/models.py @@ -11,7 +11,7 @@ from addons.owncloud import settings from addons.owncloud.serializer import OwnCloudSerializer from addons.owncloud.settings import DEFAULT_HOSTS, USE_SSL -from website.oauth.models import BasicAuthProviderMixin +from osf.models.external import BasicAuthProviderMixin from website.util import api_v2_url logger = logging.getLogger(__name__) diff --git a/addons/owncloud/views.py b/addons/owncloud/views.py index a79d2e32c6d4..abf268ed8f96 100644 --- a/addons/owncloud/views.py +++ b/addons/owncloud/views.py @@ -9,7 +9,7 @@ from framework.auth.decorators import must_be_logged_in from addons.base import generic_views -from website.oauth.models import ExternalAccount +from osf.models import ExternalAccount from website.project.decorators import ( must_have_addon) diff --git a/addons/s3/views.py b/addons/s3/views.py index 341758d5c6f9..998a4227a2c7 100644 --- a/addons/s3/views.py +++ b/addons/s3/views.py @@ -10,7 +10,7 @@ from addons.base import generic_views from addons.s3 import utils from addons.s3.serializer import S3Serializer -from 
website.oauth.models import ExternalAccount +from osf.models import ExternalAccount from website.project.decorators import ( must_have_addon, must_have_permission, must_be_addon_authorizer, diff --git a/addons/zotero/tests/test_views.py b/addons/zotero/tests/test_views.py index 00d2acda5856..c34aba028b25 100644 --- a/addons/zotero/tests/test_views.py +++ b/addons/zotero/tests/test_views.py @@ -18,7 +18,7 @@ class TestAuthViews(ZoteroTestCase, views.OAuthAddonAuthViewsTestCaseMixin, OsfTestCase): - @mock.patch('website.oauth.models.OAuth1Session.fetch_request_token') + @mock.patch('osf.models.external.OAuth1Session.fetch_request_token') def test_oauth_start(self, mock_token): mock_token.return_value = { 'oauth_token': 'token', diff --git a/admin/meetings/forms.py b/admin/meetings/forms.py index db4420dfb215..28815ead2ba6 100644 --- a/admin/meetings/forms.py +++ b/admin/meetings/forms.py @@ -4,7 +4,7 @@ from django.core.validators import validate_email from framework.auth.core import get_user -from website.models import Conference +from osf.models import Conference from website.conferences.exceptions import ConferenceError diff --git a/admin/meetings/serializers.py b/admin/meetings/serializers.py index 030b1f9597b1..24e3f0714239 100644 --- a/admin/meetings/serializers.py +++ b/admin/meetings/serializers.py @@ -1,4 +1,4 @@ -from website.conferences.model import DEFAULT_FIELD_NAMES +from osf.models.conference import DEFAULT_FIELD_NAMES def serialize_meeting(meeting): is_meeting = True diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 2dd4fa9435e7..a833d7d41c2f 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -8,10 +8,11 @@ from django.contrib.auth.mixins import PermissionRequiredMixin from modularodm import Q -from website.models import NodeLog +from osf.models import NodeLog from osf.models.user import OSFUser from osf.models.node import Node from osf.models.registrations import Registration +from osf.models import SpamStatus from 
admin.base.views import GuidFormView, GuidView from osf.models.admin_log_entry import ( update_admin_log, @@ -21,7 +22,6 @@ CONFIRM_SPAM, CONFIRM_HAM) from admin.nodes.templatetags.node_extras import reverse_node from admin.nodes.serializers import serialize_node, serialize_simple_user_and_node_permissions -from website.project.spam.model import SpamStatus from website.project.views.register import osf_admin_change_status_identifier diff --git a/admin/spam/forms.py b/admin/spam/forms.py index 2c4fb9274773..22bc9255c3e0 100644 --- a/admin/spam/forms.py +++ b/admin/spam/forms.py @@ -1,6 +1,6 @@ from django import forms -from website.project.spam.model import SpamStatus +from osf.models import SpamStatus class ConfirmForm(forms.Form): diff --git a/admin/spam/serializers.py b/admin/spam/serializers.py index 088f3ba8128a..ec7a3ff7a580 100644 --- a/admin/spam/serializers.py +++ b/admin/spam/serializers.py @@ -1,5 +1,5 @@ from website.settings import DOMAIN as OSF_DOMAIN -from website.project.model import User +from osf.models import OSFUser from furl import furl @@ -13,7 +13,7 @@ def serialize_comment(comment): return { 'id': comment._id, - 'author': User.load(comment.user._id), + 'author': OSFUser.load(comment.user._id), 'author_id': comment.user._id, 'author_path': author_abs_url.url, 'date_created': comment.date_created, @@ -31,7 +31,7 @@ def serialize_comment(comment): def serialize_report(user, report): return { - 'reporter': User.load(user), + 'reporter': OSFUser.load(user), 'category': report.get('category', None), 'reason': report.get('text', None), } diff --git a/admin/spam/views.py b/admin/spam/views.py index 69abb893aa5f..e88cd5af21aa 100644 --- a/admin/spam/views.py +++ b/admin/spam/views.py @@ -6,7 +6,7 @@ from osf.models.comment import Comment from osf.models.user import OSFUser -from website.project.spam.model import SpamStatus +from osf.models import SpamStatus from osf.models.admin_log_entry import ( update_admin_log, diff --git 
a/admin_tests/pre_reg/utils.py b/admin_tests/pre_reg/utils.py index 43ee026ef8a0..e30fd9c059e4 100644 --- a/admin_tests/pre_reg/utils.py +++ b/admin_tests/pre_reg/utils.py @@ -1,6 +1,7 @@ from modularodm import Q -from website.project.model import DraftRegistration, ensure_schemas, MetaSchema +from osf.models import DraftRegistration, MetaSchema +from website.project.model import ensure_schemas def draft_reg_util(): diff --git a/admin_tests/spam/test_views.py b/admin_tests/spam/test_views.py index 62c7ff7e06ab..2cc1800dd14c 100644 --- a/admin_tests/spam/test_views.py +++ b/admin_tests/spam/test_views.py @@ -5,7 +5,7 @@ from nose import tools as nt from datetime import timedelta -from website.project.model import Comment +from osf.models import Comment, SpamStatus from osf.models.admin_log_entry import AdminLogEntry from admin.spam.forms import ConfirmForm @@ -20,7 +20,6 @@ SpamDetail, EmailView, ) -from website.project.spam.model import SpamStatus class TestSpamListView(AdminTestCase): diff --git a/api/applications/serializers.py b/api/applications/serializers.py index 5b38519f0d41..9c57f3d0d90d 100644 --- a/api/applications/serializers.py +++ b/api/applications/serializers.py @@ -3,7 +3,7 @@ from modularodm import Q -from website.models import ApiOAuth2Application +from osf.models import ApiOAuth2Application from api.base.serializers import JSONAPISerializer, LinksField, IDField, TypeField, DateByVersion from api.base.utils import absolute_reverse diff --git a/api/applications/views.py b/api/applications/views.py index 5b365da58539..845ea17f50e0 100644 --- a/api/applications/views.py +++ b/api/applications/views.py @@ -11,7 +11,7 @@ from framework.auth import cas from framework.auth.oauth_scopes import CoreScopes -from website.models import ApiOAuth2Application +from osf.models import ApiOAuth2Application from api.base.filters import ODMFilterMixin from api.base.utils import get_object_or_error diff --git a/api/base/authentication/backends.py 
b/api/base/authentication/backends.py index efc5caa8e7b3..689549886588 100644 --- a/api/base/authentication/backends.py +++ b/api/base/authentication/backends.py @@ -1,5 +1,5 @@ from osf.models.user import OSFUser -from framework.auth.core import get_user, User +from framework.auth.core import get_user from django.contrib.auth.backends import ModelBackend # https://docs.djangoproject.com/en/1.8/topics/auth/customizing/ @@ -12,5 +12,5 @@ def get_user(self, user_id): try: user = OSFUser.objects.get(id=user_id) except OSFUser.DoesNotExist: - user = User.load(user_id) + user = OSFUser.load(user_id) return user diff --git a/api/base/pagination.py b/api/base/pagination.py index e21d60e09faf..a79d6f216948 100644 --- a/api/base/pagination.py +++ b/api/base/pagination.py @@ -14,8 +14,7 @@ from api.base.settings import MAX_PAGE_SIZE from api.base.utils import absolute_reverse -from framework.guid.model import Guid -from website.project.model import Node, Comment +from osf.models import AbstractNode as Node, Comment, Guid from website.search.elastic_search import DOC_TYPE_TO_MODEL diff --git a/api/base/permissions.py b/api/base/permissions.py index 4bf2365b3bac..8a8aa3a447b6 100644 --- a/api/base/permissions.py +++ b/api/base/permissions.py @@ -8,7 +8,7 @@ from framework.auth import oauth_scopes from framework.auth.cas import CasResponse -from website.models import ApiOAuth2Application, ApiOAuth2PersonalToken +from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken from website.util.sanitize import is_iterable_but_not_string diff --git a/api/base/serializers.py b/api/base/serializers.py index ddcf049b6239..7a40ce13bf9c 100644 --- a/api/base/serializers.py +++ b/api/base/serializers.py @@ -20,9 +20,9 @@ from api.base.settings import BULK_SETTINGS from api.base.utils import absolute_reverse, extend_querystring_params, get_user_auth, extend_querystring_if_key_exists from framework.auth import core as auth_core +from osf.models import AbstractNode as Node from website 
import settings from website import util as website_utils -from website.models import Node from website.util.sanitize import strip_html from website.project.model import has_anonymous_link diff --git a/api/base/utils.py b/api/base/utils.py index 9e2740d88976..a35d432752b0 100644 --- a/api/base/utils.py +++ b/api/base/utils.py @@ -10,9 +10,10 @@ from api.base.authentication.drf import get_session_from_cookie from api.base.exceptions import Gone -from framework.auth import Auth, User +from framework.auth import Auth from framework.auth.cas import CasResponse from framework.auth.oauth_scopes import ComposedScopes, normalize_scopes +from osf.models import OSFUser as User from osf.models.base import GuidMixin from osf.modm_compat import to_django_query from website import settings as website_settings diff --git a/api/base/views.py b/api/base/views.py index 6f7755907a7b..ccfe8ec3e0f1 100644 --- a/api/base/views.py +++ b/api/base/views.py @@ -26,7 +26,6 @@ from api.users.serializers import UserSerializer from framework.auth.oauth_scopes import CoreScopes from osf.models.contributor import Contributor -from website.models import Pointer from website import maintenance @@ -849,7 +848,8 @@ class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView): view_category = None view_name = None - model_class = Pointer + # TODO: This class no longer exists + # model_class = Pointer def get_queryset(self): auth = get_user_auth(self.request) diff --git a/api/caching/tasks.py b/api/caching/tasks.py index acf1d78bbf4e..946446a25e67 100644 --- a/api/caching/tasks.py +++ b/api/caching/tasks.py @@ -2,7 +2,6 @@ import requests import logging -from website.project.model import Comment from website import settings @@ -15,6 +14,7 @@ def get_varnish_servers(): def get_bannable_urls(instance): + from osf.models import Comment bannable_urls = [] parsed_absolute_url = {} diff --git a/api/caching/tests/test_caching.py b/api/caching/tests/test_caching.py index ea7cad0cde28..e7da242eba04 100644 --- 
a/api/caching/tests/test_caching.py +++ b/api/caching/tests/test_caching.py @@ -10,7 +10,7 @@ from django.conf import settings as django_settings from requests.auth import HTTPBasicAuth -from framework.auth import User +from osf.models import OSFUser as User from tests.factories import create_fake_project from tests.base import DbTestCase diff --git a/api/citations/utils.py b/api/citations/utils.py index 268d8f08adc9..bbb67c2d37aa 100644 --- a/api/citations/utils.py +++ b/api/citations/utils.py @@ -6,7 +6,7 @@ from citeproc import formatter from citeproc.source.json import CiteProcJSON -from website.preprints.model import PreprintService +from osf.models import PreprintService from website.settings import CITATION_STYLES_PATH, BASE_PATH, CUSTOM_CITATIONS diff --git a/api/collections/serializers.py b/api/collections/serializers.py index 06854d373b35..463e7c846c50 100644 --- a/api/collections/serializers.py +++ b/api/collections/serializers.py @@ -3,8 +3,7 @@ from rest_framework import exceptions from framework.exceptions import PermissionsError -from website.models import Node -from osf.models import Collection +from osf.models import AbstractNode as Node, Collection from osf.exceptions import ValidationError from api.base.serializers import LinksField, RelationshipField from api.base.serializers import JSONAPISerializer, IDField, TypeField, DateByVersion diff --git a/api/comments/permissions.py b/api/comments/permissions.py index 8d9ed751a872..9b852c75fcf3 100644 --- a/api/comments/permissions.py +++ b/api/comments/permissions.py @@ -3,7 +3,7 @@ from api.base.utils import get_user_auth from api.comments.serializers import CommentReport -from website.models import Node, Comment +from osf.models import AbstractNode as Node, Comment class CanCommentOrPublic(permissions.BasePermission): diff --git a/api/comments/serializers.py b/api/comments/serializers.py index 36a916e4efea..fd4f86eca129 100644 --- a/api/comments/serializers.py +++ b/api/comments/serializers.py @@ 
-5,9 +5,7 @@ from osf.exceptions import ValidationError as ModelValidationError from framework.auth.core import Auth from framework.exceptions import PermissionsError -from framework.guid.model import Guid -from website.files.models import StoredFileNode -from website.project.model import Comment +from osf.models import Guid, Comment, StoredFileNode, SpamStatus from rest_framework.exceptions import ValidationError, PermissionDenied from api.base.exceptions import InvalidModelValueError, Conflict from api.base.utils import absolute_reverse @@ -18,7 +16,6 @@ IDField, TypeField, LinksField, AnonymizedRegexField, DateByVersion) -from website.project.spam.model import SpamStatus class CommentReport(object): diff --git a/api/comments/views.py b/api/comments/views.py index a01f7b564bcd..7634c8c47a4b 100644 --- a/api/comments/views.py +++ b/api/comments/views.py @@ -22,9 +22,8 @@ from framework.auth.core import Auth from framework.auth.oauth_scopes import CoreScopes from framework.exceptions import PermissionsError -from website.project.model import Comment, Node +from osf.models import AbstractNode as Node, Comment, StoredFileNode from addons.wiki.models import NodeWikiPage -from website.files.models.base import StoredFileNode class CommentMixin(object): diff --git a/api/files/serializers.py b/api/files/serializers.py index 1411dba199b3..545136f0390e 100644 --- a/api/files/serializers.py +++ b/api/files/serializers.py @@ -5,11 +5,10 @@ import furl import pytz -from framework.auth.core import Auth, User -from osf.models import FileNode +from framework.auth.core import Auth +from osf.models import FileNode, OSFUser, Comment from rest_framework import serializers as ser from website import settings -from website.project.model import Comment from website.util import api_v2_url from api.base.serializers import ( @@ -64,7 +63,7 @@ def resolve(self, resource, field_name, request): ) def get_queryset(self): - return User.find(Q('_id', 'eq', self.context['request'].user._id)) + 
return OSFUser.find(Q('_id', 'eq', self.context['request'].user._id)) def get_url(self, obj, view_name, request, format): if obj is None: diff --git a/api/guids/serializers.py b/api/guids/serializers.py index 1616e468f3c7..faf05c49be6f 100644 --- a/api/guids/serializers.py +++ b/api/guids/serializers.py @@ -1,7 +1,6 @@ import urlparse -from website.models import Node, User, Guid -from website.files.models.base import StoredFileNode +from osf.models import OSFUser, AbstractNode as Node, Guid, StoredFileNode from website import settings as website_settings from api.base.utils import absolute_reverse @@ -11,7 +10,7 @@ def get_type(record): if isinstance(record, Node): return 'nodes' - elif isinstance(record, User): + elif isinstance(record, OSFUser): return 'users' elif isinstance(record, StoredFileNode): return 'files' diff --git a/api/guids/views.py b/api/guids/views.py index f1c8ab4f2409..53ae77155cb2 100644 --- a/api/guids/views.py +++ b/api/guids/views.py @@ -4,13 +4,13 @@ from rest_framework import permissions as drf_permissions from rest_framework import generics -from framework.guid.model import Guid from framework.auth.oauth_scopes import CoreScopes from api.base.exceptions import EndpointNotImplementedError from api.base import permissions as base_permissions from api.base.views import JSONAPIBaseView from api.base.utils import get_object_or_error, is_truthy from api.guids.serializers import GuidSerializer +from osf.models import Guid class GuidDetail(JSONAPIBaseView, generics.RetrieveAPIView): diff --git a/api/identifiers/views.py b/api/identifiers/views.py index 283f37db397c..fd625e106a0e 100644 --- a/api/identifiers/views.py +++ b/api/identifiers/views.py @@ -17,8 +17,7 @@ ExcludeWithdrawals, ) -from website.identifiers.model import Identifier -from website.project.model import Node +from osf.models import AbstractNode as Node, Identifier class IdentifierList(JSONAPIBaseView, generics.ListAPIView, ODMFilterMixin): diff --git 
a/api/institutions/authentication.py b/api/institutions/authentication.py index 9b52bd2ee6bf..926be8283c1e 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -11,7 +11,7 @@ from framework import sentry from framework.auth import get_or_create_user -from website.models import Institution +from osf.models import Institution from website.mails import send_mail, WELCOME_OSF4I diff --git a/api/institutions/serializers.py b/api/institutions/serializers.py index 0550a8666053..1091428e35f5 100644 --- a/api/institutions/serializers.py +++ b/api/institutions/serializers.py @@ -4,7 +4,7 @@ from modularodm import Q -from website.models import Node +from osf.models import AbstractNode as Node from website.util import permissions as osf_permissions from api.base.serializers import JSONAPISerializer, RelationshipField, LinksField, JSONAPIRelationshipSerializer, \ diff --git a/api/institutions/views.py b/api/institutions/views.py index 7cb2f8cc04d4..a9ee73373d7c 100644 --- a/api/institutions/views.py +++ b/api/institutions/views.py @@ -9,7 +9,7 @@ from framework.auth.oauth_scopes import CoreScopes -from website.models import Node, User, Institution +from osf.models import OSFUser as User, AbstractNode as Node, Institution from website.util import permissions as osf_permissions from api.base import permissions as base_permissions diff --git a/api/licenses/views.py b/api/licenses/views.py index f7590b3ae66e..91d73fe96e3e 100644 --- a/api/licenses/views.py +++ b/api/licenses/views.py @@ -7,7 +7,7 @@ from api.licenses.serializers import LicenseSerializer from api.base.views import JSONAPIBaseView -from website.project.licenses import NodeLicense +from osf.models import NodeLicense class LicenseDetail(JSONAPIBaseView, generics.RetrieveAPIView): diff --git a/api/logs/permissions.py b/api/logs/permissions.py index e6283fb1f75e..057e20bea531 100644 --- a/api/logs/permissions.py +++ b/api/logs/permissions.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- 
from rest_framework import permissions -from website.models import NodeLog +from osf.models import NodeLog from api.nodes.permissions import ContributorOrPublic diff --git a/api/logs/serializers.py b/api/logs/serializers.py index 4fbba4ff7214..a31f4e59740f 100644 --- a/api/logs/serializers.py +++ b/api/logs/serializers.py @@ -8,10 +8,8 @@ is_anonymized, DateByVersion, ) -from website.project.model import Node +from osf.models import OSFUser, AbstractNode as Node, PreprintService from website.util import permissions as osf_permissions -from framework.auth.core import User -from website.preprints.model import PreprintService class NodeLogIdentifiersSerializer(RestrictedDictSerializer): @@ -123,7 +121,7 @@ def get_contributors(self, obj): if contributor_ids: for contrib_id in contributor_ids: - user = User.load(contrib_id) + user = OSFUser.load(contrib_id) unregistered_name = None if user.unclaimed_records.get(params_node): unregistered_name = user.unclaimed_records[params_node].get('name', None) diff --git a/api/logs/views.py b/api/logs/views.py index bcaff664fe8e..134c98c7b7cb 100644 --- a/api/logs/views.py +++ b/api/logs/views.py @@ -3,7 +3,7 @@ from framework.auth.oauth_scopes import CoreScopes -from website.models import NodeLog +from osf.models import NodeLog from api.logs.permissions import ( ContributorOrPublicForLogs ) diff --git a/api/metaschemas/views.py b/api/metaschemas/views.py index e1802220d5f6..8875e669c076 100644 --- a/api/metaschemas/views.py +++ b/api/metaschemas/views.py @@ -9,7 +9,7 @@ from api.base.views import JSONAPIBaseView from api.base.utils import get_object_or_error -from website.models import MetaSchema +from osf.models import MetaSchema from api.metaschemas.serializers import MetaSchemaSerializer diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 995185c917f9..e16b024e7ab6 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -23,12 +23,12 @@ from rest_framework import exceptions from 
addons.base.exceptions import InvalidAuthError, InvalidFolderError from website.exceptions import NodeStateError -from website.models import (Comment, DraftRegistration, Institution, - MetaSchema, Node, PrivateLink) -from website.oauth.models import ExternalAccount -from website.preprints.model import PreprintService +from osf.models import (Comment, DraftRegistration, Institution, + MetaSchema, AbstractNode as Node, PrivateLink) +from osf.models.external import ExternalAccount +from osf.models.licenses import NodeLicense +from osf.models.preprint_service import PreprintService from website.project import new_private_link -from website.project.licenses import NodeLicense from website.project.metadata.schemas import (ACTIVE_META_SCHEMAS, LATEST_SCHEMA_VERSION) from website.project.metadata.utils import is_prereg_admin_not_project_admin diff --git a/api/nodes/utils.py b/api/nodes/utils.py index a38ceab757ca..2137d14e00a9 100644 --- a/api/nodes/utils.py +++ b/api/nodes/utils.py @@ -4,8 +4,7 @@ from rest_framework.status import is_server_error import requests -from website.files.models import OsfStorageFile -from website.files.models import OsfStorageFolder +from addons.osfstorage.models import OsfStorageFile, OsfStorageFolder from website.util import waterbutler_api_url_for from api.base.exceptions import ServiceUnavailableError diff --git a/api/preprint_providers/views.py b/api/preprint_providers/views.py index f198194a6982..2264aa4b5246 100644 --- a/api/preprint_providers/views.py +++ b/api/preprint_providers/views.py @@ -6,7 +6,7 @@ from framework.auth.oauth_scopes import CoreScopes -from website.models import Node, Subject, PreprintService, PreprintProvider +from osf.models import AbstractNode as Node, Subject, PreprintService, PreprintProvider from api.base import permissions as base_permissions from api.base.filters import PreprintFilterMixin, ODMFilterMixin diff --git a/api/preprints/permissions.py b/api/preprints/permissions.py index 3812f0c729b8..13d934c4f448 
100644 --- a/api/preprints/permissions.py +++ b/api/preprints/permissions.py @@ -3,7 +3,7 @@ from rest_framework import exceptions from api.base.utils import get_user_auth -from website.models import PreprintService +from osf.models import PreprintService from website.util import permissions as osf_permissions diff --git a/api/preprints/views.py b/api/preprints/views.py index 612eacda5594..ee14c3b008f2 100644 --- a/api/preprints/views.py +++ b/api/preprints/views.py @@ -6,7 +6,7 @@ from rest_framework.exceptions import NotFound, PermissionDenied, NotAuthenticated from rest_framework import permissions as drf_permissions -from website.models import PreprintService +from osf.models import PreprintService from framework.auth.oauth_scopes import CoreScopes from api.base.exceptions import Conflict diff --git a/api/registrations/views.py b/api/registrations/views.py index 6b64cd63dc57..cae7b9f68676 100644 --- a/api/registrations/views.py +++ b/api/registrations/views.py @@ -1,8 +1,9 @@ from rest_framework import generics, permissions as drf_permissions from rest_framework.exceptions import ValidationError, NotFound from framework.auth.oauth_scopes import CoreScopes +from modularodm import Q -from website.project.model import Q, Node, Pointer +from osf.models import AbstractNode as Node from api.base import permissions as base_permissions from api.base.views import JSONAPIBaseView, BaseContributorDetail, BaseContributorList, BaseNodeLinksDetail, BaseNodeLinksList @@ -771,7 +772,8 @@ class RegistrationNodeLinksList(BaseNodeLinksList, RegistrationMixin): required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ] required_write_scopes = [CoreScopes.NULL] - model_class = Pointer + # TODO: This class doesn't exist + # model_class = Pointer class RegistrationNodeLinksDetail(BaseNodeLinksDetail, RegistrationMixin): @@ -826,7 +828,8 @@ class RegistrationNodeLinksDetail(BaseNodeLinksDetail, RegistrationMixin): required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ] 
required_write_scopes = [CoreScopes.NULL] - model_class = Pointer + # TODO: this class doesn't exist + # model_class = Pointer # overrides RetrieveAPIView def get_object(self): diff --git a/api/taxonomies/serializers.py b/api/taxonomies/serializers.py index c1447cf2aecc..1f3780bd1408 100644 --- a/api/taxonomies/serializers.py +++ b/api/taxonomies/serializers.py @@ -1,7 +1,7 @@ from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField, ShowIfVersion -from website.models import Subject +from osf.models import Subject class TaxonomyField(ser.Field): def to_representation(self, subject): diff --git a/api/taxonomies/views.py b/api/taxonomies/views.py index e9575e31fa66..04433aba7bd8 100644 --- a/api/taxonomies/views.py +++ b/api/taxonomies/views.py @@ -6,7 +6,7 @@ from api.base.pagination import NoMaxPageSizePagination from api.base import permissions as base_permissions from api.taxonomies.serializers import TaxonomySerializer -from website.project.taxonomies import Subject +from osf.models import Subject from framework.auth.oauth_scopes import CoreScopes diff --git a/api/tokens/serializers.py b/api/tokens/serializers.py index e350dadfa9bd..68a4c45cdb37 100644 --- a/api/tokens/serializers.py +++ b/api/tokens/serializers.py @@ -2,7 +2,7 @@ from rest_framework import exceptions from framework.auth.oauth_scopes import public_scopes -from website.models import ApiOAuth2PersonalToken +from osf.models import ApiOAuth2PersonalToken from api.base.serializers import JSONAPISerializer, LinksField, IDField, TypeField diff --git a/api/tokens/views.py b/api/tokens/views.py index c6b1ba6b749e..a6a7ce2bac4c 100644 --- a/api/tokens/views.py +++ b/api/tokens/views.py @@ -17,7 +17,7 @@ from api.base import permissions as base_permissions from api.tokens.serializers import ApiOAuth2PersonalTokenSerializer -from website.models import ApiOAuth2PersonalToken +from osf.models import ApiOAuth2PersonalToken class TokenList(JSONAPIBaseView, 
generics.ListCreateAPIView, ODMFilterMixin): diff --git a/api/users/permissions.py b/api/users/permissions.py index 5a3a1c57271f..654223217473 100644 --- a/api/users/permissions.py +++ b/api/users/permissions.py @@ -1,4 +1,4 @@ -from website.models import User +from osf.models import OSFUser from rest_framework import permissions @@ -7,7 +7,7 @@ class ReadOnlyOrCurrentUser(permissions.BasePermission): and allow non-safe actions if so. """ def has_object_permission(self, request, view, obj): - assert isinstance(obj, User), 'obj must be a User, got {}'.format(obj) + assert isinstance(obj, OSFUser), 'obj must be a User, got {}'.format(obj) request_user = request.user if request.method in permissions.SAFE_METHODS: return True @@ -20,7 +20,7 @@ class CurrentUser(permissions.BasePermission): def has_permission(self, request, view): requested_user = view.get_user() - assert isinstance(requested_user, User), 'obj must be a User, got {}'.format(requested_user) + assert isinstance(requested_user, OSFUser), 'obj must be a User, got {}'.format(requested_user) return requested_user == request.user class ReadOnlyOrCurrentUserRelationship(permissions.BasePermission): diff --git a/api/users/serializers.py b/api/users/serializers.py index fdfd8b4c191c..d86fe29b5a5e 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -4,7 +4,7 @@ from api.base.exceptions import InvalidModelValueError from api.base.serializers import JSONAPIRelationshipSerializer, HideIfDisabled, PrefetchRelationshipsSerializer -from website.models import User +from osf.models import OSFUser as User from api.base.serializers import ( JSONAPISerializer, LinksField, RelationshipField, DevOnly, IDField, TypeField, ListDictField, diff --git a/api/users/views.py b/api/users/views.py index 9c8e594f320f..29ca7976b322 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -29,8 +29,7 @@ from rest_framework import permissions as drf_permissions from rest_framework import generics from 
rest_framework.exceptions import NotAuthenticated, NotFound -from website.models import ExternalAccount, Node, User -from osf.models import PreprintService +from osf.models import ExternalAccount, AbstractNode as Node, PreprintService, OSFUser as User class UserMixin(object): diff --git a/api/view_only_links/serializers.py b/api/view_only_links/serializers.py index 0480337a3eef..86fbe69aa4ef 100644 --- a/api/view_only_links/serializers.py +++ b/api/view_only_links/serializers.py @@ -9,7 +9,7 @@ PrefetchRelationshipsSerializer) from api.base.utils import absolute_reverse -from website.project.model import Node +from osf.models import AbstractNode as Node class ViewOnlyLinkDetailSerializer(JSONAPISerializer): diff --git a/api/view_only_links/views.py b/api/view_only_links/views.py index 70d154dec656..561d409e55f0 100644 --- a/api/view_only_links/views.py +++ b/api/view_only_links/views.py @@ -14,7 +14,7 @@ from api.registrations.serializers import RegistrationSerializer from api.view_only_links.serializers import ViewOnlyLinkDetailSerializer, ViewOnlyLinkNodesSerializer -from website.models import PrivateLink +from osf.models import PrivateLink class ViewOnlyLinkDetail(JSONAPIBaseView, generics.RetrieveAPIView): diff --git a/api_tests/applications/views/test_application_detail.py b/api_tests/applications/views/test_application_detail.py index 7078d05ba7ae..a6ae5a4156fb 100644 --- a/api_tests/applications/views/test_application_detail.py +++ b/api_tests/applications/views/test_application_detail.py @@ -2,7 +2,7 @@ from nose.tools import * # flake8: noqa -from website.models import ApiOAuth2Application, User +from osf.models import ApiOAuth2Application from website.util import api_v2_url from tests.base import ApiTestCase diff --git a/api_tests/applications/views/test_application_list.py b/api_tests/applications/views/test_application_list.py index 82097a8b3b6a..ca5818232a3e 100644 --- a/api_tests/applications/views/test_application_list.py +++ 
b/api_tests/applications/views/test_application_list.py @@ -3,7 +3,7 @@ from nose.tools import * # flake8: noqa -from website.models import ApiOAuth2Application, User +from osf.models import ApiOAuth2Application from website.util import api_v2_url from website.util import sanitize diff --git a/api_tests/applications/views/test_application_reset.py b/api_tests/applications/views/test_application_reset.py index b02ba8a5e1e9..072d29100019 100644 --- a/api_tests/applications/views/test_application_reset.py +++ b/api_tests/applications/views/test_application_reset.py @@ -2,7 +2,7 @@ from nose.tools import * # flake8: noqa -from website.models import ApiOAuth2Application, User +from osf.models import ApiOAuth2Application from website.util import api_v2_url from tests.base import ApiTestCase @@ -60,7 +60,7 @@ def test_reset_url_revokes_tokens_and_resets(self, mock_method): self.user1_app.reload() assert_not_equal(old_secret, self.user1_app.client_secret) - @mock.patch('website.oauth.models.ApiOAuth2Application.reset_secret') + @mock.patch('osf.models.ApiOAuth2Application.reset_secret') def test_other_user_cannot_reset(self, mock_method): mock_method.return_value(True) old_secret = self.user1_app.client_secret @@ -71,7 +71,7 @@ def test_other_user_cannot_reset(self, mock_method): self.user1_app.reload() assert_equal(old_secret, self.user1_app.client_secret) - @mock.patch('website.oauth.models.ApiOAuth2Application.reset_secret') + @mock.patch('osf.models.ApiOAuth2Application.reset_secret') def test_unauth_user_cannot_reset(self, mock_method): mock_method.return_value(True) old_secret = self.user1_app.client_secret diff --git a/api_tests/base/test_root.py b/api_tests/base/test_root.py index f1d5b5612999..123899bad30e 100644 --- a/api_tests/base/test_root.py +++ b/api_tests/base/test_root.py @@ -13,9 +13,8 @@ from framework.auth.oauth_scopes import public_scopes from framework.auth.cas import CasResponse -from framework.sessions.model import Session from website import 
settings -from website.oauth.models import ApiOAuth2PersonalToken +from osf.models import ApiOAuth2PersonalToken, Session class TestWelcomeToApi(ApiTestCase): def setUp(self): diff --git a/api_tests/base/test_views.py b/api_tests/base/test_views.py index 2130f63e6f2d..fdfc36f5950c 100644 --- a/api_tests/base/test_views.py +++ b/api_tests/base/test_views.py @@ -85,7 +85,7 @@ def test_view_classes_define_or_override_serializer_class(self): has_serializer_class = getattr(view, 'serializer_class', None) or getattr(view, 'get_serializer_class', None) assert_true(has_serializer_class, "{0} should include serializer class or override get_serializer_class()".format(view)) - @mock.patch('framework.auth.core.User.is_confirmed', mock.PropertyMock(return_value=False)) + @mock.patch('osf.models.OSFUser.is_confirmed', mock.PropertyMock(return_value=False)) def test_unconfirmed_user_gets_error(self): user = factories.AuthUserFactory() @@ -93,7 +93,7 @@ def test_unconfirmed_user_gets_error(self): res = self.app.get('/{}nodes/'.format(API_BASE), auth=user.auth, expect_errors=True) assert_equal(res.status_code, http.BAD_REQUEST) - @mock.patch('framework.auth.core.User.is_disabled', mock.PropertyMock(return_value=True)) + @mock.patch('osf.models.OSFUser.is_disabled', mock.PropertyMock(return_value=True)) def test_disabled_user_gets_error(self): user = factories.AuthUserFactory() diff --git a/api_tests/collections/test_views.py b/api_tests/collections/test_views.py index e485bedb9dd7..41329a5f20b3 100644 --- a/api_tests/collections/test_views.py +++ b/api_tests/collections/test_views.py @@ -3,7 +3,7 @@ import pytest from nose.tools import * # flake8: noqa -from website.models import Node, NodeLog +from osf.models import AbstractNode as Node, NodeLog from website.util.sanitize import strip_html from website.util import disconnected_from_listeners from website.project.signals import contributor_removed diff --git a/api_tests/comments/views/test_comment_detail.py 
b/api_tests/comments/views/test_comment_detail.py index f5fd146dade5..130e26dde777 100644 --- a/api_tests/comments/views/test_comment_detail.py +++ b/api_tests/comments/views/test_comment_detail.py @@ -5,7 +5,7 @@ from nose.tools import * # flake8: noqa from framework.auth import core -from framework.guid.model import Guid +from osf.models import Guid from api.base.settings.defaults import API_BASE from api.base.settings import osf_settings diff --git a/api_tests/comments/views/test_comment_report_detail.py b/api_tests/comments/views/test_comment_report_detail.py index 266f66e94259..78341dae53c6 100644 --- a/api_tests/comments/views/test_comment_report_detail.py +++ b/api_tests/comments/views/test_comment_report_detail.py @@ -4,7 +4,7 @@ from nose.tools import * # flake8: noqa from datetime import datetime -from framework.guid.model import Guid +from osf.models import Guid from api.base.settings.defaults import API_BASE from api_tests import utils as test_utils diff --git a/api_tests/comments/views/test_comment_report_list.py b/api_tests/comments/views/test_comment_report_list.py index 0a772d31349c..67c8db980600 100644 --- a/api_tests/comments/views/test_comment_report_list.py +++ b/api_tests/comments/views/test_comment_report_list.py @@ -4,7 +4,7 @@ from nose.tools import * # flake8: noqa from datetime import datetime -from framework.guid.model import Guid +from osf.models import Guid from api.base.settings.defaults import API_BASE from api_tests import utils as test_utils diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 334812d4e285..856bee395ddf 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -10,12 +10,11 @@ from api.base.settings.defaults import API_BASE from api_tests import utils as api_utils from framework.auth.core import Auth -from framework.sessions.model import Session from osf_tests.factories import (AuthUserFactory, CommentFactory, 
ProjectFactory, UserFactory) from tests.base import ApiTestCase, capture_signals from website import settings as website_settings -from website.project.model import NodeLog +from osf.models import NodeLog, Session from website.project.signals import contributor_removed diff --git a/api_tests/files/views/test_file_list.py b/api_tests/files/views/test_file_list.py index 3f8bc845b8f7..8147e9078d16 100644 --- a/api_tests/files/views/test_file_list.py +++ b/api_tests/files/views/test_file_list.py @@ -9,7 +9,6 @@ from tests.base import ApiTestCase from osf_tests.factories import ( ProjectFactory, - UserFactory, AuthUserFactory, ) diff --git a/api_tests/guids/views/test_guid_detail.py b/api_tests/guids/views/test_guid_detail.py index bc0df4736314..4d9679006f22 100644 --- a/api_tests/guids/views/test_guid_detail.py +++ b/api_tests/guids/views/test_guid_detail.py @@ -2,7 +2,7 @@ from api.base.settings.defaults import API_BASE import pytest -from website.files.models.osfstorage import OsfStorageFile +from addons.osfstorage.models import OsfStorageFile from website.settings import API_DOMAIN from tests.base import ApiTestCase diff --git a/api_tests/identifiers/views/test_identifier_detail.py b/api_tests/identifiers/views/test_identifier_detail.py index 58447e6035ea..b903ed888c74 100644 --- a/api_tests/identifiers/views/test_identifier_detail.py +++ b/api_tests/identifiers/views/test_identifier_detail.py @@ -2,7 +2,7 @@ from nose.tools import * # flake8: noqa import urlparse from api.base.settings.defaults import API_BASE -from website.identifiers.model import Identifier +from osf.models import Identifier from tests.base import ApiTestCase from osf_tests.factories import ( diff --git a/api_tests/identifiers/views/test_identifier_list.py b/api_tests/identifiers/views/test_identifier_list.py index d46557599502..d32564dc5226 100644 --- a/api_tests/identifiers/views/test_identifier_list.py +++ b/api_tests/identifiers/views/test_identifier_list.py @@ -4,7 +4,7 @@ from nose.tools 
import * # flake8: noqa from api.base.settings.defaults import API_BASE -from website.identifiers.model import Identifier +from osf.models import Identifier from tests.base import ApiTestCase from osf_tests.factories import ( diff --git a/api_tests/institutions/views/test_institution_auth.py b/api_tests/institutions/views/test_institution_auth.py index 97550c79ec46..ba591caebd20 100644 --- a/api_tests/institutions/views/test_institution_auth.py +++ b/api_tests/institutions/views/test_institution_auth.py @@ -14,7 +14,7 @@ from api.base import settings from api.base.settings.defaults import API_BASE from framework.auth import signals -from website.models import User +from osf.models import OSFUser as User class TestInstitutionAuth(ApiTestCase): diff --git a/api_tests/institutions/views/test_institution_list.py b/api_tests/institutions/views/test_institution_list.py index 01a232406034..542b0f171245 100644 --- a/api_tests/institutions/views/test_institution_list.py +++ b/api_tests/institutions/views/test_institution_list.py @@ -3,7 +3,7 @@ from tests.base import ApiTestCase from osf_tests.factories import InstitutionFactory -from website.models import Node +from osf.models import AbstractNode as Node from api.base.settings.defaults import API_BASE class TestInstitutionList(ApiTestCase): diff --git a/api_tests/licenses/views/test_license_detail.py b/api_tests/licenses/views/test_license_detail.py index ce64dddbd310..2c03468e58c9 100644 --- a/api_tests/licenses/views/test_license_detail.py +++ b/api_tests/licenses/views/test_license_detail.py @@ -2,7 +2,7 @@ import functools from tests.base import ApiTestCase -from website.project.licenses import NodeLicense +from osf.models.licenses import NodeLicense from website.project.licenses import ensure_licenses from api.base.settings.defaults import API_BASE diff --git a/api_tests/licenses/views/test_license_list.py b/api_tests/licenses/views/test_license_list.py index 39709d297349..c577d3cdaa9b 100644 --- 
a/api_tests/licenses/views/test_license_list.py +++ b/api_tests/licenses/views/test_license_list.py @@ -2,7 +2,7 @@ import functools from tests.base import ApiTestCase -from website.project.licenses import NodeLicense +from osf.models.licenses import NodeLicense from website.project.licenses import ensure_licenses from api.base.settings.defaults import API_BASE diff --git a/api_tests/logs/views/test_log_detail.py b/api_tests/logs/views/test_log_detail.py index 54e99e9a7c35..d7c6cd909069 100644 --- a/api_tests/logs/views/test_log_detail.py +++ b/api_tests/logs/views/test_log_detail.py @@ -11,7 +11,7 @@ from framework.auth.core import Auth -from website.models import NodeLog +from osf.models import NodeLog from website.util import permissions as osf_permissions from api.base.settings.defaults import API_BASE from api_tests import utils as api_utils diff --git a/api_tests/metaschemas/views/test_metaschemas_detail.py b/api_tests/metaschemas/views/test_metaschemas_detail.py index a5ed4d43920e..c86e083d1c33 100644 --- a/api_tests/metaschemas/views/test_metaschemas_detail.py +++ b/api_tests/metaschemas/views/test_metaschemas_detail.py @@ -1,7 +1,8 @@ from nose.tools import * # flake8: noqa from website.project.metadata.schemas import ACTIVE_META_SCHEMAS, LATEST_SCHEMA_VERSION -from website.project.model import ensure_schemas, MetaSchema, Q +from website.project.model import ensure_schemas, Q +from osf.models import MetaSchema from api.base.settings.defaults import API_BASE diff --git a/api_tests/nodes/views/test_node_children_list.py b/api_tests/nodes/views/test_node_children_list.py index b1b18e4f94bd..88896c684e02 100644 --- a/api_tests/nodes/views/test_node_children_list.py +++ b/api_tests/nodes/views/test_node_children_list.py @@ -3,7 +3,7 @@ from framework.auth.core import Auth -from website.models import Node, NodeLog +from osf.models import AbstractNode as Node, NodeLog from website.util import permissions from website.util.sanitize import strip_html diff --git 
a/api_tests/nodes/views/test_node_comments_list.py b/api_tests/nodes/views/test_node_comments_list.py index ff15fe8e98c6..056ccb63ab7e 100644 --- a/api_tests/nodes/views/test_node_comments_list.py +++ b/api_tests/nodes/views/test_node_comments_list.py @@ -4,7 +4,7 @@ from nose.tools import * # flake8: noqa from framework.auth import core -from framework.guid.model import Guid +from osf.models import Guid from api.base.settings.defaults import API_BASE from api.base.settings import osf_settings diff --git a/api_tests/nodes/views/test_node_contributors_detail.py b/api_tests/nodes/views/test_node_contributors_detail.py index 866d371a19ac..ec5d6692416f 100644 --- a/api_tests/nodes/views/test_node_contributors_detail.py +++ b/api_tests/nodes/views/test_node_contributors_detail.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- from nose.tools import * # flake8: noqa -from website.models import NodeLog -from website.project.model import Auth +from osf.models import NodeLog +from framework.auth.core import Auth from website.util import permissions from api.base.settings.defaults import API_BASE diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index 495fb0a045e5..6be8bbb43f09 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -23,7 +23,7 @@ ) from tests.utils import assert_logs -from website.models import NodeLog +from osf.models import NodeLog from website.project.signals import contributor_added, unreg_contributor_added, contributor_removed from website.util import permissions, disconnected_from_listeners diff --git a/api_tests/nodes/views/test_node_detail.py b/api_tests/nodes/views/test_node_detail.py index 860084b3b686..651f8070ee13 100644 --- a/api_tests/nodes/views/test_node_detail.py +++ b/api_tests/nodes/views/test_node_detail.py @@ -7,7 +7,7 @@ from modularodm import Q import pytest -from website.models import NodeLog +from 
osf.models import NodeLog from website.views import find_bookmark_collection from website.util import permissions from website.util.sanitize import strip_html @@ -27,7 +27,7 @@ ) from website.project.licenses import ensure_licenses -from website.project.licenses import NodeLicense +from osf.models.licenses import NodeLicense ensure_licenses = functools.partial(ensure_licenses, warn=False) diff --git a/api_tests/nodes/views/test_node_draft_registration_detail.py b/api_tests/nodes/views/test_node_draft_registration_detail.py index 253deae18ee2..af68acd78034 100644 --- a/api_tests/nodes/views/test_node_draft_registration_detail.py +++ b/api_tests/nodes/views/test_node_draft_registration_detail.py @@ -2,7 +2,7 @@ from nose.tools import * # flake8: noqa from modularodm import Q -from website.models import MetaSchema +from osf.models import MetaSchema from website.project.metadata.schemas import LATEST_SCHEMA_VERSION from api.base.settings.defaults import API_BASE from website.settings import PREREG_ADMIN_TAG diff --git a/api_tests/nodes/views/test_node_draft_registration_list.py b/api_tests/nodes/views/test_node_draft_registration_list.py index 3344884c5c1c..9d6d7012eaa2 100644 --- a/api_tests/nodes/views/test_node_draft_registration_list.py +++ b/api_tests/nodes/views/test_node_draft_registration_list.py @@ -1,7 +1,7 @@ from nose.tools import * # flake8: noqa from website.project.model import ensure_schemas -from website.models import MetaSchema +from osf.models import MetaSchema from website.project.metadata.schemas import LATEST_SCHEMA_VERSION from website.project.metadata.utils import create_jsonschema_from_metaschema from modularodm import Q diff --git a/api_tests/nodes/views/test_node_files_list.py b/api_tests/nodes/views/test_node_files_list.py index 2e10a7edb2d8..0732a721f85b 100644 --- a/api_tests/nodes/views/test_node_files_list.py +++ b/api_tests/nodes/views/test_node_files_list.py @@ -9,7 +9,7 @@ from framework.auth.core import Auth from 
addons.github.tests.factories import GitHubAccountFactory -from website.models import Node +from osf.models import AbstractNode as Node from website.util import waterbutler_api_url_for from api.base.settings.defaults import API_BASE from api_tests import utils as api_utils diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index 34826733f0b7..2b7aa678f589 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -2,7 +2,7 @@ from framework.auth.core import Auth -from website.models import Node +from osf.models import AbstractNode as Node from website.util import permissions from api.base.settings.defaults import API_BASE diff --git a/api_tests/nodes/views/test_node_links_detail.py b/api_tests/nodes/views/test_node_links_detail.py index 1c3e6793967e..925a008ee78c 100644 --- a/api_tests/nodes/views/test_node_links_detail.py +++ b/api_tests/nodes/views/test_node_links_detail.py @@ -3,7 +3,7 @@ from urlparse import urlparse from framework.auth.core import Auth -from website.models import NodeLog +from osf.models import NodeLog from api.base.settings.defaults import API_BASE diff --git a/api_tests/nodes/views/test_node_links_list.py b/api_tests/nodes/views/test_node_links_list.py index 0c41b179eb18..6962e481bb75 100644 --- a/api_tests/nodes/views/test_node_links_list.py +++ b/api_tests/nodes/views/test_node_links_list.py @@ -3,7 +3,7 @@ from urlparse import urlparse from framework.auth.core import Auth -from website.models import NodeLog +from osf.models import NodeLog from api.base.settings.defaults import API_BASE diff --git a/api_tests/nodes/views/test_node_list.py b/api_tests/nodes/views/test_node_list.py index 0226666ba0fa..4147182dd23e 100644 --- a/api_tests/nodes/views/test_node_list.py +++ b/api_tests/nodes/views/test_node_list.py @@ -6,7 +6,7 @@ from modularodm import Q from framework.auth.core import Auth -from website.models import Node, NodeLog 
+from osf.models import AbstractNode as Node, NodeLog from website.util import permissions from website.util.sanitize import strip_html from website.views import find_bookmark_collection diff --git a/api_tests/nodes/views/test_node_logs.py b/api_tests/nodes/views/test_node_logs.py index a807dc425d7f..575161d5d3fe 100644 --- a/api_tests/nodes/views/test_node_logs.py +++ b/api_tests/nodes/views/test_node_logs.py @@ -6,7 +6,7 @@ import pytest from framework.auth.core import Auth -from website.models import NodeLog +from osf.models import NodeLog from website.util import disconnected_from_listeners from website.project.signals import contributor_removed from api.base.settings.defaults import API_BASE diff --git a/api_tests/nodes/views/test_node_preprints.py b/api_tests/nodes/views/test_node_preprints.py index a52868502685..0e24b790ca1b 100644 --- a/api_tests/nodes/views/test_node_preprints.py +++ b/api_tests/nodes/views/test_node_preprints.py @@ -6,8 +6,8 @@ from api_tests.preprints.filters.test_filters import PreprintsListFilteringMixin from api_tests.preprints.views.test_preprint_list_mixin import PreprintIsPublishedListMixin, PreprintIsValidListMixin -from website.preprints.model import PreprintService -from website.files.models.osfstorage import OsfStorageFile +from osf.models import PreprintService +from addons.osfstorage.models import OsfStorageFile from osf_tests.factories import PreprintFactory, AuthUserFactory, ProjectFactory, SubjectFactory, PreprintProviderFactory from api_tests import utils as test_utils diff --git a/api_tests/nodes/views/test_view_only_query_parameter.py b/api_tests/nodes/views/test_view_only_query_parameter.py index 7b057e375cbe..1388b6807897 100644 --- a/api_tests/nodes/views/test_view_only_query_parameter.py +++ b/api_tests/nodes/views/test_view_only_query_parameter.py @@ -9,7 +9,7 @@ AuthUserFactory, PrivateLinkFactory, ) -from website.models import Node +from osf.models import AbstractNode as Node class ViewOnlyTestCase(ApiTestCase): 
diff --git a/api_tests/preprints/views/test_preprint_list.py b/api_tests/preprints/views/test_preprint_list.py index 2cdc39b34e56..1fb83005107d 100644 --- a/api_tests/preprints/views/test_preprint_list.py +++ b/api_tests/preprints/views/test_preprint_list.py @@ -25,7 +25,7 @@ def build_preprint_create_payload(node_id=None, provider_id=None, file_id=None, payload = { "data": { "attributes": attrs, - "relationships": {}, + "relationships": {}, "type": "preprints" } } diff --git a/api_tests/registrations/views/test_registration_forks.py b/api_tests/registrations/views/test_registration_forks.py index df3b2edc9737..5b9ef55858d4 100644 --- a/api_tests/registrations/views/test_registration_forks.py +++ b/api_tests/registrations/views/test_registration_forks.py @@ -4,7 +4,7 @@ from framework.auth.core import Auth -from website.models import Node +from osf.models import AbstractNode as Node from website.util import permissions from api.base.settings.defaults import API_BASE diff --git a/api_tests/registrations/views/test_registration_list.py b/api_tests/registrations/views/test_registration_list.py index 6df728c8d160..7f740fc97747 100644 --- a/api_tests/registrations/views/test_registration_list.py +++ b/api_tests/registrations/views/test_registration_list.py @@ -8,7 +8,7 @@ from website.project.model import ensure_schemas from website.project.metadata.schemas import LATEST_SCHEMA_VERSION -from website.models import Node, MetaSchema, DraftRegistration +from osf.models import AbstractNode as Node, MetaSchema, DraftRegistration from website.views import find_bookmark_collection from framework.auth.core import Auth, Q from api.base.settings.defaults import API_BASE diff --git a/api_tests/search/serializers/test_serializers.py b/api_tests/search/serializers/test_serializers.py index 1051e04cf598..4089cbdfcc5a 100644 --- a/api_tests/search/serializers/test_serializers.py +++ b/api_tests/search/serializers/test_serializers.py @@ -12,7 +12,7 @@ ) from tests.utils import 
make_drf_request_with_version, mock_archive -from website.models import MetaSchema +from osf.models import MetaSchema from website.project.model import ensure_schemas from website.project.metadata.schemas import LATEST_SCHEMA_VERSION from website.search import search diff --git a/api_tests/search/views/test_views.py b/api_tests/search/views/test_views.py index d955a5a9d7da..6f022defcc36 100644 --- a/api_tests/search/views/test_views.py +++ b/api_tests/search/views/test_views.py @@ -15,7 +15,7 @@ ) from osf_tests.utils import mock_archive -from website.models import MetaSchema +from osf.models import MetaSchema from website.project.model import ensure_schemas from website.project.metadata.schemas import LATEST_SCHEMA_VERSION from website.search import search diff --git a/api_tests/taxonomies/views/test_taxonomy_list.py b/api_tests/taxonomies/views/test_taxonomy_list.py index c884fcd5d0e6..628adeab49b0 100644 --- a/api_tests/taxonomies/views/test_taxonomy_list.py +++ b/api_tests/taxonomies/views/test_taxonomy_list.py @@ -3,7 +3,7 @@ from modularodm import Q from tests.base import ApiTestCase from osf_tests.factories import SubjectFactory -from website.project.taxonomies import Subject +from osf.models import Subject from api.base.settings.defaults import API_BASE diff --git a/api_tests/tokens/views/test_token_detail.py b/api_tests/tokens/views/test_token_detail.py index c23f11f58657..fa30a03dc793 100644 --- a/api_tests/tokens/views/test_token_detail.py +++ b/api_tests/tokens/views/test_token_detail.py @@ -3,7 +3,7 @@ from nose.tools import * # flake8: noqa -from website.models import User, ApiOAuth2PersonalToken +from osf.models import ApiOAuth2PersonalToken from website.util import api_v2_url from tests.base import ApiTestCase diff --git a/api_tests/tokens/views/test_token_list.py b/api_tests/tokens/views/test_token_list.py index cad655c38f4e..94c68770302f 100644 --- a/api_tests/tokens/views/test_token_list.py +++ b/api_tests/tokens/views/test_token_list.py @@ -3,7 
+3,7 @@ from nose.tools import * # flake8: noqa -from website.models import User, ApiOAuth2PersonalToken +from osf.models import ApiOAuth2PersonalToken from website.util import api_v2_url from website.util import sanitize diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py index 1167526c3f92..43014e816a37 100644 --- a/api_tests/users/views/test_user_detail.py +++ b/api_tests/users/views/test_user_detail.py @@ -2,7 +2,7 @@ import urlparse from nose.tools import * # flake8: noqa -from website.models import Node +from osf.models import AbstractNode as Node from website.util.sanitize import strip_html from website.views import find_bookmark_collection diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index a32cb3f7a5ea..018acf48f67f 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -16,10 +16,8 @@ from api.base.settings.defaults import API_BASE from framework.auth.cas import CasResponse -from framework.sessions.model import Session -from website.models import User +from osf.models import OSFUser as User, Session, ApiOAuth2PersonalToken from website import settings -from website.oauth.models import ApiOAuth2PersonalToken from website.util.permissions import CREATOR_PERMISSIONS diff --git a/api_tests/wikis/views/test_wiki_detail.py b/api_tests/wikis/views/test_wiki_detail.py index cdaeef9d0d6e..f9db102be628 100644 --- a/api_tests/wikis/views/test_wiki_detail.py +++ b/api_tests/wikis/views/test_wiki_detail.py @@ -6,7 +6,7 @@ from api.base.settings.defaults import API_BASE -from framework.guid.model import Guid +from osf.models import Guid from addons.wiki.models import NodeWikiPage diff --git a/framework/addons/__init__.py b/framework/addons/__init__.py index 52ecb065fcd3..e69de29bb2d1 100644 --- a/framework/addons/__init__.py +++ b/framework/addons/__init__.py @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- - -from modularodm import Q 
- -from framework.mongo import StoredObject -from website import settings - - -class AddonModelMixin(StoredObject): - - _meta = { - 'abstract': True, - } - - @property - def addons(self): - return self.get_addons() - - def get_addons(self): - addons = [] - for addon_name in settings.ADDONS_AVAILABLE_DICT.keys(): - addon = self.get_addon(addon_name) - if addon: - addons.append(addon) - return addons - - def get_oauth_addons(self): - # TODO: Using hasattr is a dirty hack - we should be using issubclass(). - # We can't, because importing the parent classes here causes a - # circular import error. - return [ - addon for addon in self.get_addons() - if hasattr(addon, 'oauth_provider') - ] - - def get_addon_names(self): - return [ - addon.config.short_name - for addon in self.get_addons() - ] - - def get_addon(self, addon_name, deleted=False): - """Get addon for node. - - :param str addon_name: Name of addon - :return: Settings record if found, else None - - """ - addon_config = settings.ADDONS_AVAILABLE_DICT.get(addon_name) - if not addon_config or not addon_config.settings_models.get(self._name): - return False - - addon_class = addon_config.settings_models[self._name] - addons = list(addon_class.find(Q('owner', 'eq', self))) - if addons: - if deleted or not addons[0].deleted: - assert len(addons) == 1, 'Violation of one-to-one mapping with addon model' - return addons[0] - return None - - def has_addon(self, addon_name, deleted=False): - return bool(self.get_addon(addon_name, deleted=deleted)) - - def add_addon(self, addon_name, auth=None, override=False, _force=False): - """Add an add-on to the node. - - :param str addon_name: Name of add-on - :param Auth auth: Consolidated authorization object - :param bool override: For shell use only, Allows adding of system addons - :param bool _force: For migration testing ONLY. Do not set to True - in the application, or else projects will be allowed to have - duplicate addons! 
- :return bool: Add-on was added - - """ - if not override and addon_name in settings.SYSTEM_ADDED_ADDONS[self._name]: - return False - - # Reactivate deleted add-on if present - addon = self.get_addon(addon_name, deleted=True) - if addon: - if addon.deleted: - addon.undelete(save=True) - return True - if not _force: - return False - - # Get add-on settings model - addon_config = settings.ADDONS_AVAILABLE_DICT.get(addon_name) - if not addon_config or not addon_config.settings_models[self._name]: - return False - - # Instantiate model - model = addon_config.settings_models[self._name](owner=self) - model.on_add() - model.save() - - return True - - def get_or_add_addon(self, addon_name, **kwargs): - """Get addon from owner; if it doesn't exist, create one. - - :param str addon_name: Name of addon - :return: Addon settings record - """ - addon = self.get_addon(addon_name) - if addon: - return addon - self.add_addon(addon_name, **kwargs) - return self.get_addon(addon_name) - - def delete_addon(self, addon_name, auth=None, _force=False): - """Delete an add-on from the node. - - :param str addon_name: Name of add-on - :param Auth auth: Consolidated authorization object - :param bool _force: For migration testing ONLY. Do not set to True - in the application, or else projects will be allowed to delete - mandatory add-ons! - :return bool: Add-on was deleted - """ - addon = self.get_addon(addon_name) - if addon: - if self._name in addon.config.added_mandatory and not _force: - raise ValueError('Cannot delete mandatory add-on.') - if getattr(addon, 'external_account', None): - addon.deauthorize(auth=auth) - addon.delete(save=True) - return True - return False - - def config_addons(self, config, auth=None, save=True): - """Enable or disable a set of add-ons. 
- - :param dict config: Mapping between add-on names and enabled / disabled - statuses - """ - - for addon_name, enabled in config.iteritems(): - if enabled: - self.add_addon(addon_name, auth) - else: - self.delete_addon(addon_name, auth) - if save: - self.save() diff --git a/framework/auth/__init__.py b/framework/auth/__init__.py index c7e6df07fae2..bc16eb7125a5 100644 --- a/framework/auth/__init__.py +++ b/framework/auth/__init__.py @@ -3,7 +3,7 @@ from framework import bcrypt from framework.auth import signals -from framework.auth.core import User, Auth +from framework.auth.core import Auth from framework.auth.core import get_user, generate_verification_key from framework.auth.exceptions import DuplicateEmailError from framework.sessions import session, create_session @@ -13,7 +13,6 @@ __all__ = [ 'get_display_name', 'Auth', - 'User', 'get_user', 'check_password', 'authenticate', @@ -85,9 +84,10 @@ def logout(): def register_unconfirmed(username, password, fullname, campaign=None): + from osf.models import OSFUser user = get_user(email=username) if not user: - user = User.create_unconfirmed( + user = OSFUser.create_unconfirmed( username=username, password=password, fullname=fullname, @@ -103,7 +103,7 @@ def register_unconfirmed(username, password, fullname, campaign=None): user.update_guessed_names() user.save() else: - raise DuplicateEmailError('User {0!r} already exists'.format(username)) + raise DuplicateEmailError('OSFUser {0!r} already exists'.format(username)) return user @@ -117,12 +117,13 @@ def get_or_create_user(fullname, address, reset_password=True, is_spam=False): :param bool is_spam: user flagged as potential spam :return: tuple of (user, created) """ + from osf.models import OSFUser user = get_user(email=address) if user: return user, False else: password = str(uuid.uuid4()) - user = User.create_confirmed(address, password, fullname) + user = OSFUser.create_confirmed(address, password, fullname) if reset_password: user.verification_key_v2 = 
generate_verification_key(verification_type='password') if is_spam: diff --git a/framework/auth/cas.py b/framework/auth/cas.py index 4d37a8f71fd3..e2091cd778af 100644 --- a/framework/auth/cas.py +++ b/framework/auth/cas.py @@ -8,7 +8,6 @@ from lxml import etree import requests -from framework.auth import User from framework.auth import authenticate, external_first_login_authenticate from framework.auth.core import get_user, generate_verification_key from framework.flask import redirect @@ -321,9 +320,9 @@ def get_user_from_cas_resp(cas_resp): :param cas_resp: the cas service validation response :return: the user, the external_credential, and the next action """ - + from osf.models import OSFUser if cas_resp.user: - user = User.objects.filter(guids___id=cas_resp.user).first() + user = OSFUser.objects.filter(guids___id=cas_resp.user).first() # cas returns a valid OSF user id if user: return user, None, 'authenticate' diff --git a/framework/auth/core.py b/framework/auth/core.py index e1a8a2348213..a1688ecab36b 100644 --- a/framework/auth/core.py +++ b/framework/auth/core.py @@ -2,40 +2,16 @@ import datetime as dt -import framework import itertools import logging -import re -import urllib -import urlparse -from copy import deepcopy -from framework import analytics -import bson -import itsdangerous -import pytz from django.utils import timezone -from flask import Request as FlaskRequest -from framework.addons import AddonModelMixin -from framework.auth import signals, utils -from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError, - InvalidTokenError, - MergeConfirmedRequiredError, - MergeConflictError) -from framework.bcrypt import check_password_hash, generate_password_hash -from framework.exceptions import PermissionsError -from framework.guid.model import GuidStoredObject -from framework.mongo import get_cache_key from framework.mongo.validators import string_required -from framework.sentry import log_exception from framework.sessions import 
session -from framework.sessions.model import Session -from framework.sessions.utils import remove_sessions_for_user -from modularodm import Q, fields -from modularodm.exceptions import (NoResultsFound, QueryException, - ValidationError, ValidationValueError) +from modularodm import Q +from modularodm.exceptions import QueryException, ValidationError, ValidationValueError from modularodm.validators import URLValidator -from website import filters, mails, security, settings +from website import security, settings name_formatters = { 'long': lambda user: user.fullname, @@ -118,9 +94,10 @@ def get_current_user_id(): # TODO - rename to _get_current_user_from_session /HRYBACKI def _get_current_user(): + from osf.models import OSFUser current_user_id = get_current_user_id() if current_user_id: - return User.load(current_user_id) + return OSFUser.load(current_user_id) else: return None @@ -142,6 +119,7 @@ def get_user(email=None, password=None, token=None, external_id_provider=None, e :param external_id: the external id :rtype User or None """ + from osf.models import OSFUser if password and not email: raise AssertionError('If a password is provided, an email must also be provided.') @@ -158,7 +136,7 @@ def get_user(email=None, password=None, token=None, external_id_provider=None, e query = query_list[0] for query_part in query_list[1:]: query = query & query_part - user = User.find_one(query) + user = OSFUser.find_one(query) except Exception as err: logger.error(err) user = None @@ -176,7 +154,7 @@ def get_user(email=None, password=None, token=None, external_id_provider=None, e query = query_list[0] for query_part in query_list[1:]: query = query & query_part - user = User.find_one(query) + user = OSFUser.find_one(query) return user except Exception as err: logger.error(err) @@ -205,7 +183,7 @@ def private_link(self): return None try: # Avoid circular import - from website.project.model import PrivateLink + from osf.models import PrivateLink private_link = 
PrivateLink.find_one( Q('key', 'eq', self.private_key) ) @@ -228,1420 +206,6 @@ def from_kwargs(cls, request_args, kwargs): ) -class User(GuidStoredObject, AddonModelMixin): - - # Node fields that trigger an update to the search engine on save - SEARCH_UPDATE_FIELDS = { - 'fullname', - 'given_name', - 'middle_names', - 'family_name', - 'suffix', - 'merged_by', - 'date_disabled', - 'date_confirmed', - 'jobs', - 'schools', - 'social', - } - - # TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a - # search update for all nodes to which the user is a contributor. - - SOCIAL_FIELDS = { - 'orcid': u'http://orcid.org/{}', - 'github': u'http://github.com/{}', - 'scholar': u'http://scholar.google.com/citations?user={}', - 'twitter': u'http://twitter.com/{}', - 'profileWebsites': [], - 'linkedIn': u'https://www.linkedin.com/{}', - 'impactStory': u'https://impactstory.org/{}', - 'researcherId': u'http://researcherid.com/rid/{}', - 'researchGate': u'https://researchgate.net/profile/{}', - 'academiaInstitution': u'https://{}', - 'academiaProfileID': u'.academia.edu/{}', - 'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}', - 'ssrn': u'http://papers.ssrn.com/sol3/cf_dev/AbsByAuth.cfm?per_id={}', - } - - # This is a GuidStoredObject, so this will be a GUID. - _id = fields.StringField(primary=True) - - # The primary email address for the account. - # This value is unique, but multiple "None" records exist for: - # * unregistered contributors where an email address was not provided. - # TODO: Update mailchimp subscription on username change in user.save() - username = fields.StringField(required=False, unique=True, index=True) - - # Hashed. 
Use `User.set_password` and `User.check_password` - password = fields.StringField() - - fullname = fields.StringField(required=True, validate=string_required) - - # user has taken action to register the account - is_registered = fields.BooleanField(index=True) - - # user has claimed the account - # TODO: This should be retired - it always reflects is_registered. - # While a few entries exist where this is not the case, they appear to be - # the result of a bug, as they were all created over a small time span. - is_claimed = fields.BooleanField(default=False, index=True) - - # a list of strings - for internal use - system_tags = fields.StringField(list=True) - - # security emails that have been sent - # TODO: This should be removed and/or merged with system_tags - security_messages = fields.DictionaryField() - # Format: { - # : - # ... - # } - - # user was invited (as opposed to registered unprompted) - is_invited = fields.BooleanField(default=False, index=True) - - # Per-project unclaimed user data: - # TODO: add validation - unclaimed_records = fields.DictionaryField(required=False) - # Format: { - # : { - # 'name': , - # 'referrer_id': , - # 'token': , - # 'expires': , - # 'email': , - # 'claimer_email': , - # 'last_sent': - # } - # ... - # } - - # Time of last sent notification email to newly added contributors - contributor_added_email_records = fields.DictionaryField(default=dict) - # Format : { - # : { - # 'last_sent': time.time() - # } - # ... 
- # } - - # The user into which this account was merged - merged_by = fields.ForeignField('user', default=None, index=True) - - # verification key v1: only the token string, no expiration time - # used for cas login with username and verification key - verification_key = fields.StringField() - - # verification key v2: token, and expiration time - # used for password reset, confirm account/email, claim account/contributor-ship - verification_key_v2 = fields.DictionaryField(default=dict) - # Format: { - # 'token': - # 'expires': - # } - - email_last_sent = fields.DateTimeField() - - # confirmed emails - # emails should be stripped of whitespace and lower-cased before appending - # TODO: Add validator to ensure an email address only exists once across - # all User's email lists - emails = fields.StringField(list=True) - - # email verification tokens - # see also ``unconfirmed_emails`` - email_verifications = fields.DictionaryField(default=dict) - # Format: { - # : { - # 'email': , - # 'expiration': , - # 'confirmed': whether user is confirmed or not, - # 'external_identity': user's external identity, - # } - # } - - # TODO remove this field once migration (scripts/migration/migrate_mailing_lists_to_mailchimp_fields.py) - # has been run. This field is deprecated and replaced with mailchimp_mailing_lists - mailing_lists = fields.DictionaryField() - - # email lists to which the user has chosen a subscription setting - mailchimp_mailing_lists = fields.DictionaryField() - # Format: { - # 'list1': True, - # 'list2: False, - # ... - # } - - # email lists to which the user has chosen a subscription setting, being sent from osf, rather than mailchimp - osf_mailing_lists = fields.DictionaryField(default=lambda: {settings.OSF_HELP_LIST: True}) - # Format: { - # 'list1': True, - # 'list2: False, - # ... 
- # } - - # the date this user was registered - # TODO: consider removal - this can be derived from date_registered - date_registered = fields.DateTimeField(auto_now_add=dt.datetime.utcnow, - index=True) - - # watched nodes are stored via a list of WatchConfigs - watched = fields.ForeignField('WatchConfig', list=True) - - # list of collaborators that this user recently added to nodes as a contributor - recently_added = fields.ForeignField('user', list=True) - - # Attached external accounts (OAuth) - external_accounts = fields.ForeignField('externalaccount', list=True) - - # CSL names - given_name = fields.StringField() - middle_names = fields.StringField() - family_name = fields.StringField() - suffix = fields.StringField() - - # identity for user logged in through external idp - external_identity = fields.DictionaryField() - # Format: { - # : { - # : , - # ... - # }, - # ... - # } - - # Employment history - jobs = fields.DictionaryField(list=True, validate=validate_history_item) - # Format: { - # 'title': , - # 'institution': , - # 'department': , - # 'location': , - # 'startMonth': , - # 'startYear': , - # 'endMonth': , - # 'endYear': , - # 'ongoing: - # } - - # Educational history - schools = fields.DictionaryField(list=True, validate=validate_history_item) - # Format: { - # 'degree': , - # 'institution': , - # 'department': , - # 'location': , - # 'startMonth': , - # 'startYear': , - # 'endMonth': , - # 'endYear': , - # 'ongoing: - # } - - # Social links - social = fields.DictionaryField(validate=validate_social) - # Format: { - # 'profileWebsites': - # 'twitter': , - # } - - # date the user last sent a request - date_last_login = fields.DateTimeField() - - # date the user first successfully confirmed an email address - date_confirmed = fields.DateTimeField(index=True) - - # When the user was disabled. 
- date_disabled = fields.DateTimeField(index=True) - - # when comments were last viewed - comments_viewed_timestamp = fields.DictionaryField() - # Format: { - # 'Comment.root_target._id': 'timestamp', - # ... - # } - - # timezone for user's locale (e.g. 'America/New_York') - timezone = fields.StringField(default='Etc/UTC') - - # user language and locale data (e.g. 'en_US') - locale = fields.StringField(default='en_US') - - # whether the user has requested to deactivate their account - requested_deactivation = fields.BooleanField(default=False) - - # dictionary of projects a user has changed the setting on - notifications_configured = fields.DictionaryField() - # Format: { - # : True - # ... - # } - - # If this user was created through the API, - # keep track of who added them. - registered_by = fields.ForeignField('user', default=None, index=True) - - _meta = {'optimistic': True} - - def __repr__(self): - return ''.format(self.username, self._id) - - def __str__(self): - return self.fullname.encode('ascii', 'replace') - - __unicode__ = __str__ - - # For compatibility with Django auth - @property - def pk(self): - return self._id - - @property - def email(self): - return self.username - - @property - def is_authenticated(self): # Needed for django compat - return True - - @property - def is_anonymous(self): - return False - - @property - def absolute_api_v2_url(self): - from website import util - return util.api_v2_url('users/{}/'.format(self.pk)) - - # used by django and DRF - def get_absolute_url(self): - if not self.is_registered: - return None - return self.absolute_api_v2_url - - @classmethod - def create_unregistered(cls, fullname, email=None): - """Create a new unregistered user. 
- """ - user = cls( - username=email, - fullname=fullname, - is_invited=True, - is_registered=False, - ) - user.update_guessed_names() - return user - - @classmethod - def create(cls, username, password, fullname): - utils.validate_email(username) # Raises ValidationError if spam address - - user = cls( - username=username, - fullname=fullname, - ) - user.update_guessed_names() - user.set_password(password) - return user - - @classmethod - def create_unconfirmed(cls, username, password, fullname, external_identity=None, - do_confirm=True, campaign=None): - """Create a new user who has begun registration but needs to verify - their primary email address (username). - """ - user = cls.create(username, password, fullname) - user.add_unconfirmed_email(username, external_identity=external_identity) - user.is_registered = False - if external_identity: - user.external_identity.update(external_identity) - if campaign: - # needed to prevent circular import - from framework.auth.campaigns import system_tag_for_campaign # skipci - user.add_system_tag(system_tag_for_campaign(campaign)) - return user - - @classmethod - def create_confirmed(cls, username, password, fullname): - user = cls.create(username, password, fullname) - user.is_registered = True - user.is_claimed = True - user.date_confirmed = user.date_registered - user.emails.append(username) - return user - - @classmethod - def from_cookie(cls, cookie, secret=None): - """Attempt to load a user from their signed cookie - :returns: None if a user cannot be loaded else User - """ - if not cookie: - return None - - secret = secret or settings.SECRET_KEY - - try: - token = itsdangerous.Signer(secret).unsign(cookie) - except itsdangerous.BadSignature: - return None - - user_session = Session.load(token) - - if user_session is None: - return None - - return cls.load(user_session.data.get('auth_user_id')) - - def get_or_create_cookie(self, secret=None): - """Find the cookie for the given user - Create a new session if no 
cookie is found - - :param str secret: The key to sign the cookie with - :returns: The signed cookie - """ - secret = secret or settings.SECRET_KEY - sessions = Session.find( - Q('data.auth_user_id', 'eq', self._id) - ).sort( - '-date_modified' - ).limit(1) - - if sessions.count() > 0: - user_session = sessions[0] - else: - user_session = Session(data={ - 'auth_user_id': self._id, - 'auth_user_username': self.username, - 'auth_user_fullname': self.fullname, - }) - user_session.save() - - signer = itsdangerous.Signer(secret) - return signer.sign(user_session._id) - - def update_guessed_names(self): - """Updates the CSL name fields inferred from the the full name. - """ - parsed = utils.impute_names(self.fullname) - self.given_name = parsed['given'] - self.middle_names = parsed['middle'] - self.family_name = parsed['family'] - self.suffix = parsed['suffix'] - - def register(self, username, password=None): - """Registers the user. - """ - self.username = username - if password: - self.set_password(password) - if username not in self.emails: - self.emails.append(username) - self.is_registered = True - self.is_claimed = True - self.date_confirmed = timezone.now() - self.update_search() - self.update_search_nodes() - - # Emit signal that a user has confirmed - signals.user_confirmed.send(self) - - return self - - def add_unclaimed_record(self, node, referrer, given_name, email=None): - """Add a new project entry in the unclaimed records dictionary. - - :param Node node: Node this unclaimed user was added to. - :param User referrer: User who referred this user. - :param str given_name: The full name that the referrer gave for this user. - :param str email: The given email address. 
- :returns: The added record - """ - if not node.can_edit(user=referrer): - raise PermissionsError( - 'Referrer does not have permission to add a contributor to project {0}'.format(node._primary_key) - ) - project_id = node._primary_key - referrer_id = referrer._primary_key - if email: - clean_email = email.lower().strip() - else: - clean_email = None - - verification_key = generate_verification_key(verification_type='claim') - record = { - 'name': given_name, - 'referrer_id': referrer_id, - 'token': verification_key['token'], - 'expires': verification_key['expires'], - 'email': clean_email, - } - self.unclaimed_records[project_id] = record - return record - - def display_full_name(self, node=None): - """Return the full name , as it would display in a contributor list for a - given node. - - NOTE: Unclaimed users may have a different name for different nodes. - """ - if node: - unclaimed_data = self.unclaimed_records.get(node._primary_key, None) - if unclaimed_data: - return unclaimed_data['name'] - return self.fullname - - @property - def is_active(self): - """Returns True if the user is active. The user must have activated - their account, must not be deleted, suspended, etc. - - :return: bool - """ - return (self.is_registered and - self.password is not None and - not self.is_merged and - not self.is_disabled and - self.is_confirmed) - - def get_unclaimed_record(self, project_id): - """ - Get an unclaimed record for a given project_id. Return the one record if found. Otherwise, raise ValueError. - - :param project_id, the project node id - :raises: ValueError if there is no record for the given project. 
- """ - - try: - return self.unclaimed_records[project_id] - except KeyError: # re-raise as ValueError - raise ValueError('No unclaimed record for user {self._id} on node {project_id}'.format(**locals())) - - def verify_claim_token(self, token, project_id): - """ - Verify the claim token for this user for a given node which she/he was added as a unregistered contributor for. - Return `True` if record found, token valid and not expired. Otherwise return False. - - :param token: the claim token - :param project_id: the project node id - """ - - try: - record = self.get_unclaimed_record(project_id) - except ValueError: # No unclaimed record for given pid - return False - valid = record['token'] == token - if 'expires' in record: - valid = valid and record['expires'] > timezone.now() - return valid - - def get_claim_url(self, project_id, external=False): - """ - Return the URL that an unclaimed user should use to claim their account. - Raise `ValueError` if there is no unclaimed_record for the given project ID. - - :param project_id: the project id for the unclaimed record - :param external: absolute url or relative - :returns: the claim url - :raises: ValueError if there is no record for the given project. - """ - - unclaimed_record = self.get_unclaimed_record(project_id) - uid = self._primary_key - base_url = settings.DOMAIN if external else '/' - token = unclaimed_record['token'] - return '{base_url}user/{uid}/{project_id}/claim/?token={token}'.format(**locals()) - - def verify_password_token(self, token): - """ - Verify that the password reset token for this user is valid. 
- - :param token: the token in verification key - :return `True` if valid, otherwise `False` - """ - - if token and self.verification_key_v2: - try: - return (self.verification_key_v2['token'] == token and - self.verification_key_v2['expires'] > timezone.now()) - except AttributeError: - return False - return False - - def set_password(self, raw_password, notify=True): - """Set the password for this user to the hash of ``raw_password``. - If this is a new user, we're done. If this is a password change, - then email the user about the change and clear all the old sessions - so that users will have to log in again with the new password. - - :param raw_password: the plaintext value of the new password - :param notify: Only meant for unit tests to keep extra notifications from being sent - :rtype: list - :returns: Changed fields from the user save - """ - had_existing_password = bool(self.password and self.is_confirmed) - self.password = generate_password_hash(raw_password) - if self.username == raw_password: - raise ChangePasswordError(['Password cannot be the same as your email address']) - if had_existing_password and notify: - mails.send_mail( - to_addr=self.username, - mail=mails.PASSWORD_RESET, - mimetype='plain', - user=self - ) - remove_sessions_for_user(self) - - def check_password(self, raw_password): - """Return a boolean of whether ``raw_password`` was correct.""" - if not self.password or not raw_password: - return False - return check_password_hash(self.password, raw_password) - - @property - def csl_given_name(self): - parts = [self.given_name] - if self.middle_names: - parts.extend(each[0] for each in re.split(r'\s+', self.middle_names)) - return ' '.join(parts) - - @property - def csl_name(self): - return { - 'family': self.family_name, - 'given': self.csl_given_name, - } - - @property - def created(self): - from website.project.model import Node - return Node.find(Q('creator', 'eq', self._id)) - - # TODO: This should not be on the User object. 
- def change_password(self, raw_old_password, raw_new_password, raw_confirm_password): - """Change the password for this user to the hash of ``raw_new_password``.""" - raw_old_password = (raw_old_password or '').strip() - raw_new_password = (raw_new_password or '').strip() - raw_confirm_password = (raw_confirm_password or '').strip() - - # TODO: Move validation to set_password - issues = [] - if not self.check_password(raw_old_password): - issues.append('Old password is invalid') - elif raw_old_password == raw_new_password: - issues.append('Password cannot be the same') - elif raw_new_password == self.username: - issues.append('Password cannot be the same as your email address') - if not raw_old_password or not raw_new_password or not raw_confirm_password: - issues.append('Passwords cannot be blank') - elif len(raw_new_password) < 8: - issues.append('Password should be at least eight characters') - elif len(raw_new_password) > 255: - issues.append('Password should not be longer than 255 characters') - - if raw_new_password != raw_confirm_password: - issues.append('Password does not match the confirmation') - - if issues: - raise ChangePasswordError(issues) - self.set_password(raw_new_password) - - def add_unconfirmed_email(self, email, expiration=None, external_identity=None): - """ - Add an email verification token for a given email. - - :param email: the email to confirm - :param email: overwrite default expiration time - :param external_identity: the user's external identity - :return: a token - :raises: ValueError if email already confirmed, except for login through external idp. - """ - - # TODO: This is technically not compliant with RFC 822, which requires - # that case be preserved in the "local-part" of an address. From - # a practical standpoint, the vast majority of email servers do - # not preserve case. 
- # ref: https://tools.ietf.org/html/rfc822#section-6 - - email = email.lower().strip() - - if not external_identity and email in self.emails: - raise ValueError('Email already confirmed to this user.') - - utils.validate_email(email) - - # If the unconfirmed email is already present, remove it and generate a new one - if email in self.unconfirmed_emails: - self.remove_unconfirmed_email(email) - verification_key = generate_verification_key(verification_type='confirm') - # handle when email_verifications is None - if not self.email_verifications: - self.email_verifications = {} - self.email_verifications[verification_key['token']] = { - 'email': email, - 'confirmed': False, - 'expiration': expiration if expiration else verification_key['expires'], - 'external_identity': external_identity, - } - - return verification_key['token'] - - def remove_unconfirmed_email(self, email): - """Remove an unconfirmed email addresses and their tokens.""" - for token, value in self.email_verifications.iteritems(): - if value.get('email') == email: - del self.email_verifications[token] - return True - - return False - - def remove_email(self, email): - """Remove a confirmed email""" - if email == self.username: - raise PermissionsError("Can't remove primary email") - if email in self.emails: - self.emails.remove(email) - signals.user_email_removed.send(self, email=email) - - @signals.user_email_removed.connect - def _send_email_removal_confirmations(self, email): - mails.send_mail(to_addr=self.username, - mail=mails.REMOVED_EMAIL, - user=self, - removed_email=email, - security_addr='alternate email address ({})'.format(email)) - mails.send_mail(to_addr=email, - mail=mails.REMOVED_EMAIL, - user=self, - removed_email=email, - security_addr='primary email address ({})'.format(self.username)) - - def get_confirmation_token(self, email, force=False, renew=False): - """ - Return the confirmation token for a given email. - - :param str email: The email to get the token for. 
- :param bool force: If an expired token exists for the given email, generate a new one and return it. - :param bool renew: Generate a new token and return it. - :return Return the confirmation token. - :raises: ExpiredTokenError if trying to access a token that is expired and force=False. - :raises: KeyError if there no token for the email. - """ - - # TODO: Refactor "force" flag into User.get_or_add_confirmation_token - for token, info in self.email_verifications.items(): - if info['email'].lower() == email.lower(): - # Old records will not have an expiration key. If it's missing, assume the token is expired. - expiration = info.get('expiration') - if renew: - new_token = self.add_unconfirmed_email(email) - self.save() - return new_token - if not expiration or (expiration and expiration < timezone.now()): - if not force: - raise ExpiredTokenError('Token for email "{0}" is expired'.format(email)) - else: - new_token = self.add_unconfirmed_email(email) - self.save() - return new_token - return token - raise KeyError('No confirmation token for email "{0}"'.format(email)) - - def get_confirmation_url(self, email, external=True, force=False, renew=False, external_id_provider=None, destination=None): - """ - Return the confirmation url for a given email. - - :param email: The email to confirm. - :param external: Use absolute or relative url. - :param force: If an expired token exists for the given email, generate a new one and return it. - :param renew: Generate a new token and return it. - :param external_id_provider: The external identity provider that authenticates the user. - :param destination: The destination page to redirect after confirmation - :return: Return the confirmation url. - :raises: ExpiredTokenError if trying to access a token that is expired. - :raises: KeyError if there is no token for the email. 
- """ - - base = settings.DOMAIN if external else '/' - token = self.get_confirmation_token(email, force=force, renew=renew) - external = 'external/' if external_id_provider else '' - destination = '?{}'.format(urllib.urlencode({'destination': destination})) if destination else '' - return '{0}confirm/{1}{2}/{3}/{4}'.format(base, external, self._primary_key, token, destination) - - def get_unconfirmed_email_for_token(self, token): - """Return email if valid. - :rtype: bool - :raises: ExpiredTokenError if trying to access a token that is expired. - :raises: InvalidTokenError if trying to access a token that is invalid. - - """ - if token not in self.email_verifications: - raise InvalidTokenError - - verification = self.email_verifications[token] - # Not all tokens are guaranteed to have expiration dates - if ( - 'expiration' in verification and - verification['expiration'] < timezone.now() - ): - raise ExpiredTokenError - - return verification['email'] - - def clean_email_verifications(self, given_token=None): - email_verifications = deepcopy(self.email_verifications or {}) - for token in self.email_verifications or {}: - try: - self.get_unconfirmed_email_for_token(token) - except (KeyError, ExpiredTokenError): - email_verifications.pop(token) - continue - if token == given_token: - email_verifications.pop(token) - self.email_verifications = email_verifications - - def confirm_email(self, token, merge=False): - """Confirm the email address associated with the token""" - email = self.get_unconfirmed_email_for_token(token) - - # If this email is confirmed on another account, abort - try: - user_to_merge = User.find_one(Q('emails', 'iexact', email)) - except NoResultsFound: - user_to_merge = None - - if user_to_merge and merge: - self.merge_user(user_to_merge) - elif user_to_merge: - raise MergeConfirmedRequiredError( - 'Merge requires confirmation', - user=self, - user_to_merge=user_to_merge, - ) - - # If another user has this email as its username, get it - try: - 
unregistered_user = User.find_one(Q('username', 'eq', email) & - Q('_id', 'ne', self._id)) - except NoResultsFound: - unregistered_user = None - - if unregistered_user: - self.merge_user(unregistered_user) - self.save() - - if email not in self.emails: - self.emails.append(email) - - # Complete registration if primary email - if email.lower() == self.username.lower(): - self.register(self.username) - self.date_confirmed = timezone.now() - # Revoke token - del self.email_verifications[token] - - # TODO: We can't assume that all unclaimed records are now claimed. - # Clear unclaimed records, so user's name shows up correctly on - # all projects - self.unclaimed_records = {} - self.save() - - self.update_search_nodes() - - return True - - @property - def unconfirmed_emails(self): - # Handle when email_verifications field is None - email_verifications = self.email_verifications or {} - return [ - each['email'] - for each - in email_verifications.values() - ] - - def update_search_nodes(self): - """Call `update_search` on all nodes on which the user is a - contributor. Needed to add self to contributor lists in search upon - registration or claiming. - - """ - for node in self.contributed: - node.update_search() - - def update_search_nodes_contributors(self): - """ - Bulk update contributor name on all nodes on which the user is - a contributor. - :return: - """ - from website.search import search - search.update_contributors_async(self.id) - - def update_affiliated_institutions_by_email_domain(self): - """ - Append affiliated_institutions by email domain. 
- :return: - """ - # Avoid circular import - from website.project.model import Institution - try: - email_domains = [email.split('@')[1] for email in self.emails] - insts = Institution.find(Q('email_domains', 'in', email_domains)) - for inst in insts: - if inst not in self.affiliated_institutions: - self.affiliated_institutions.append(inst) - except (IndexError, NoResultsFound): - pass - - @property - def is_confirmed(self): - return bool(self.date_confirmed) - - @property - def social_links(self): - social_user_fields = {} - for key, val in self.social.items(): - if val and key in self.SOCIAL_FIELDS: - if not isinstance(val, basestring): - social_user_fields[key] = val - else: - social_user_fields[key] = self.SOCIAL_FIELDS[key].format(val) - return social_user_fields - - @property - def biblio_name(self): - given_names = self.given_name + ' ' + self.middle_names - surname = self.family_name - if surname != given_names: - initials = [ - name[0].upper() + '.' - for name in given_names.split(' ') - if name and re.search(r'\w', name[0], re.I) - ] - return u'{0}, {1}'.format(surname, ' '.join(initials)) - return surname - - @property - def given_name_initial(self): - """ - The user's preferred initialization of their given name. - - Some users with common names may choose to distinguish themselves from - their colleagues in this way. For instance, there could be two - well-known researchers in a single field named "Robert Walker". - "Walker, R" could then refer to either of them. "Walker, R.H." could - provide easy disambiguation. - - NOTE: The internal representation for this should never end with a - period. "R" and "R.H" would be correct in the prior case, but - "R.H." would not. 
- """ - return self.given_name[0] - - @property - def url(self): - return '/{}/'.format(self._primary_key) - - @property - def api_url(self): - return '/api/v1/profile/{0}/'.format(self._primary_key) - - @property - def absolute_url(self): - return urlparse.urljoin(settings.DOMAIN, self.url) - - @property - def display_absolute_url(self): - url = self.absolute_url - if url is not None: - return re.sub(r'https?:', '', url).strip('/') - - @property - def deep_url(self): - return '/profile/{}/'.format(self._primary_key) - - @property - def unconfirmed_email_info(self): - """Return a list of dictionaries containing information about each of this - user's unconfirmed emails. - """ - unconfirmed_emails = [] - email_verifications = self.email_verifications or [] - for token in email_verifications: - if self.email_verifications[token].get('confirmed', False): - try: - user_merge = User.find_one(Q('emails', 'eq', self.email_verifications[token]['email'].lower())) - except NoResultsFound: - user_merge = False - - unconfirmed_emails.append({'address': self.email_verifications[token]['email'], - 'token': token, - 'confirmed': self.email_verifications[token]['confirmed'], - 'user_merge': user_merge.email if user_merge else False}) - return unconfirmed_emails - - def profile_image_url(self, size=None): - """A generalized method for getting a user's profile picture urls. - We may choose to use some service other than gravatar in the future, - and should not commit ourselves to using a specific service (mostly - an API concern). 
- - As long as we use gravatar, this is just a proxy to User.gravatar_url - """ - return self._gravatar_url(size) - - def _gravatar_url(self, size): - return filters.gravatar( - self, - use_ssl=True, - size=size - ) - - def get_activity_points(self, db=None): - db = db or framework.mongo.database - return analytics.get_total_activity_count(self._primary_key, db=db) - - def disable_account(self): - """ - Disables user account, making is_disabled true, while also unsubscribing user - from mailchimp emails, remove any existing sessions. - """ - from website import mailchimp_utils - from framework.auth import logout - - try: - mailchimp_utils.unsubscribe_mailchimp( - list_name=settings.MAILCHIMP_GENERAL_LIST, - user_id=self._id, - username=self.username - ) - except mailchimp_utils.mailchimp.ListNotSubscribedError: - pass - except mailchimp_utils.mailchimp.InvalidApiKeyError: - if not settings.ENABLE_EMAIL_SUBSCRIPTIONS: - pass - else: - raise - except mailchimp_utils.mailchimp.EmailNotExistsError: - pass - self.is_disabled = True - - # we must call both methods to ensure the current session is cleared and all existing - # sessions are revoked. - req = get_cache_key() - if isinstance(req, FlaskRequest): - logout() - remove_sessions_for_user(self) - - @property - def is_disabled(self): - """Whether or not this account has been disabled. - - Abstracts ``User.date_disabled``. - - :return: bool - """ - return self.date_disabled is not None - - @is_disabled.setter - def is_disabled(self, val): - """Set whether or not this account has been disabled.""" - if val and not self.date_disabled: - self.date_disabled = timezone.now() - elif val is False: - self.date_disabled = None - - @property - def is_merged(self): - '''Whether or not this account has been merged into another account. 
- ''' - return self.merged_by is not None - - @property - def profile_url(self): - return '/{}/'.format(self._id) - - @property - def contributed(self): - from website.project.model import Node - return Node.find(Q('contributors', 'eq', self._id)) - - @property - def contributor_to(self): - from website.project.model import Node - return Node.find( - Q('contributors', 'eq', self._id) & - Q('is_deleted', 'ne', True) & - Q('is_collection', 'ne', True) - ) - - @property - def visible_contributor_to(self): - from website.project.model import Node - return Node.find( - Q('contributors', 'eq', self._id) & - Q('is_deleted', 'ne', True) & - Q('is_collection', 'ne', True) & - Q('visible_contributor_ids', 'eq', self._id) - ) - - def get_summary(self, formatter='long'): - return { - 'user_fullname': self.fullname, - 'user_profile_url': self.profile_url, - 'user_display_name': name_formatters[formatter](self), - 'user_is_claimed': self.is_claimed - } - - def save(self, *args, **kwargs): - # TODO: Update mailchimp subscription on username change - # Avoid circular import - first_save = not self._is_loaded - self.username = self.username.lower().strip() if self.username else None - ret = super(User, self).save(*args, **kwargs) - if self.SEARCH_UPDATE_FIELDS.intersection(ret) and self.is_confirmed: - self.update_search() - self.update_search_nodes_contributors() - if first_save: - from website.project import new_bookmark_collection # Avoid circular import - new_bookmark_collection(self) - return ret - - def update_search(self): - from website import search - try: - search.search.update_user(self) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception() - - @classmethod - def find_by_email(cls, email): - try: - user = cls.find_one( - Q('emails', 'eq', email) - ) - return [user] - except: - return [] - - def serialize(self, anonymous=False): - return { - 'id': utils.privacy_info_handle(self._primary_key, anonymous), - 'fullname': 
utils.privacy_info_handle(self.fullname, anonymous, name=True), - 'registered': self.is_registered, - 'url': utils.privacy_info_handle(self.url, anonymous), - 'api_url': utils.privacy_info_handle(self.api_url, anonymous), - } - - ###### OSF-Specific methods ###### - - def watch(self, watch_config): - """Watch a node by adding its WatchConfig to this user's ``watched`` - list. Raises ``ValueError`` if the node is already watched. - - :param watch_config: The WatchConfig to add. - :param save: Whether to save the user. - - """ - watched_nodes = [each.node for each in self.watched] - if watch_config.node in watched_nodes: - raise ValueError('Node is already being watched.') - watch_config.save() - self.watched.append(watch_config) - return None - - def unwatch(self, watch_config): - """Unwatch a node by removing its WatchConfig from this user's ``watched`` - list. Raises ``ValueError`` if the node is not already being watched. - - :param watch_config: The WatchConfig to remove. - :param save: Whether to save the user. - - """ - for each in self.watched: - if watch_config.node._id == each.node._id: - from framework.transactions.context import TokuTransaction # Avoid circular import - with TokuTransaction(): - # Ensure that both sides of the relationship are removed - each.__class__.remove_one(each) - self.watched.remove(each) - self.save() - return None - raise ValueError('Node not being watched.') - - def is_watching(self, node): - '''Return whether a not a user is watching a Node.''' - watched_node_ids = set([config.node._id for config in self.watched]) - return node._id in watched_node_ids - - def get_recent_log_ids(self, since=None): - '''Return a generator of recent logs' ids. - - :param since: A datetime specifying the oldest time to retrieve logs - from. If ``None``, defaults to 60 days before today. Must be a tz-aware - datetime because PyMongo's generation times are tz-aware. 
- - :rtype: generator of log ids (strings) - ''' - log_ids = [] - # Default since to 60 days before today if since is None - # timezone aware utcnow - utcnow = timezone.now() - since_date = since or (utcnow - dt.timedelta(days=60)) - for config in self.watched: - # Extract the timestamps for each log from the log_id (fast!) - # The first 4 bytes of Mongo's ObjectId encodes time - # This prevents having to load each Log Object and access their - # date fields - node_log_ids = [log.pk for log in config.node.logs - if bson.ObjectId(log.pk).generation_time > since_date and - log.pk not in log_ids] - # Log ids in reverse chronological order - log_ids = _merge_into_reversed(log_ids, node_log_ids) - return (l_id for l_id in log_ids) - - def get_daily_digest_log_ids(self): - '''Return a generator of log ids generated in the past day - (starting at UTC 00:00). - ''' - utcnow = timezone.now() - midnight = dt.datetime( - utcnow.year, utcnow.month, utcnow.day, - 0, 0, 0, tzinfo=pytz.utc - ) - return self.get_recent_log_ids(since=midnight) - - @property - def can_be_merged(self): - """The ability of the `merge_user` method to fully merge the user""" - return all((addon.can_be_merged for addon in self.get_addons())) - - def merge_user(self, user): - """Merge a registered user into this account. This user will be - a contributor on any project. if the registered user and this account - are both contributors of the same project. Then it will remove the - registered user and set this account to the highest permission of the two - and set this account to be visible if either of the two are visible on - the project. - :param user: A User object to be merged. - """ - # Fail if the other user has conflicts. 
- if not user.can_be_merged: - raise MergeConflictError('Users cannot be merged') - # Move over the other user's attributes - # TODO: confirm - for system_tag in user.system_tags: - if system_tag not in self.system_tags: - self.system_tags.append(system_tag) - - self.is_claimed = self.is_claimed or user.is_claimed - self.is_invited = self.is_invited or user.is_invited - - # copy over profile only if this user has no profile info - if user.jobs and not self.jobs: - self.jobs = user.jobs - - if user.schools and not self.schools: - self.schools = user.schools - - if user.social and not self.social: - self.social = user.social - - unclaimed = user.unclaimed_records.copy() - unclaimed.update(self.unclaimed_records) - self.unclaimed_records = unclaimed - # - unclaimed records should be connected to only one user - user.unclaimed_records = {} - - security_messages = user.security_messages.copy() - security_messages.update(self.security_messages) - self.security_messages = security_messages - - notifications_configured = user.notifications_configured.copy() - notifications_configured.update(self.notifications_configured) - self.notifications_configured = notifications_configured - - if not settings.RUNNING_MIGRATION: - for key, value in user.mailchimp_mailing_lists.iteritems(): - # subscribe to each list if either user was subscribed - subscription = value or self.mailchimp_mailing_lists.get(key) - signals.user_merged.send(self, list_name=key, subscription=subscription) - - # clear subscriptions for merged user - signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False) - - for target_id, timestamp in user.comments_viewed_timestamp.iteritems(): - if not self.comments_viewed_timestamp.get(target_id): - self.comments_viewed_timestamp[target_id] = timestamp - elif timestamp > self.comments_viewed_timestamp[target_id]: - self.comments_viewed_timestamp[target_id] = timestamp - - for email in user.emails: - if email not in self.emails: - 
self.emails.append(email) - user.emails = [] - - for k, v in user.email_verifications.iteritems(): - email_to_confirm = v['email'] - if k not in self.email_verifications and email_to_confirm != user.username: - self.email_verifications[k] = v - user.email_verifications = {} - - for institution in user.affiliated_institutions: - self.affiliated_institutions.append(institution) - user._affiliated_institutions = [] - - for service in user.external_identity: - for service_id in user.external_identity[service].iterkeys(): - if not (service_id in self.external_identity.get(service, '') and self.external_identity[service][service_id] == 'VERIFIED'): - # Prevent 'CREATE', merging user has already been created. - status = 'VERIFIED' if user.external_identity[service][service_id] == 'VERIFIED' else 'LINK' - if self.external_identity.get(service): - self.external_identity[service].update( - {service_id: status} - ) - else: - self.external_identity[service] = { - service_id: status - } - user.external_identity = {} - - # FOREIGN FIELDS - for watched in user.watched: - if watched not in self.watched: - self.watched.append(watched) - user.watched = [] - - for account in user.external_accounts: - if account not in self.external_accounts: - self.external_accounts.append(account) - user.external_accounts = [] - - # - addons - # Note: This must occur before the merged user is removed as a - # contributor on the nodes, as an event hook is otherwise fired - # which removes the credentials. - for addon in user.get_addons(): - user_settings = self.get_or_add_addon(addon.config.short_name) - user_settings.merge(addon) - user_settings.save() - - # Disconnect signal to prevent emails being sent about being a new contributor when merging users - # be sure to reconnect it at the end of this code block. Import done here to prevent circular import error. 
- from addons.osfstorage.listeners import checkin_files_by_user - from website.project.signals import contributor_added, contributor_removed - from website.project.views.contributor import notify_added_contributor - from website.util import disconnected_from - - # - projects where the user was a contributor - with disconnected_from(signal=contributor_added, listener=notify_added_contributor): - for node in user.contributed: - # Skip bookmark collection node - if node.is_bookmark_collection: - continue - # if both accounts are contributor of the same project - if node.is_contributor(self) and node.is_contributor(user): - if node.permissions[user._id] > node.permissions[self._id]: - permissions = node.permissions[user._id] - else: - permissions = node.permissions[self._id] - node.set_permissions(user=self, permissions=permissions) - - visible1 = self._id in node.visible_contributor_ids - visible2 = user._id in node.visible_contributor_ids - if visible1 != visible2: - node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self)) - - else: - node.add_contributor( - contributor=self, - permissions=node.get_permissions(user), - visible=node.get_visible(user), - log=False, - send_email='false' - ) - - with disconnected_from(signal=contributor_removed, listener=checkin_files_by_user): - try: - node.remove_contributor( - contributor=user, - auth=Auth(user=self), - log=False, - ) - except ValueError: - logger.error('Contributor {0} not in list on node {1}'.format( - user._id, node._id - )) - - node.save() - - # - projects where the user was the creator - for node in user.created: - node.creator = self - node.save() - - # - file that the user has checked_out, import done here to prevent import error - from website.files.models.base import FileNode - for file_node in FileNode.files_checked_out(user=user): - file_node.checkout = self - file_node.save() - - # finalize the merge - - remove_sessions_for_user(user) - - # - username is set to _id to ensure uniqueness - 
user.username = user._id - user.password = None - user.verification_key = None - user.osf_mailing_lists = {} - user.merged_by = self - - user.save() - - def get_projects_in_common(self, other_user, primary_keys=True): - """Returns either a collection of "shared projects" (projects that both users are contributors for) - or just their primary keys - """ - if primary_keys: - projects_contributed_to = set(self.contributor_to.get_keys()) - other_projects_primary_keys = set(other_user.contributor_to.get_keys()) - return projects_contributed_to.intersection(other_projects_primary_keys) - else: - projects_contributed_to = set(self.contributor_to) - return projects_contributed_to.intersection(other_user.contributor_to) - - def n_projects_in_common(self, other_user): - """Returns number of "shared projects" (projects that both users are contributors for)""" - return len(self.get_projects_in_common(other_user, primary_keys=True)) - - def is_affiliated_with_institution(self, inst): - return inst in self.affiliated_institutions - - def remove_institution(self, inst_id): - removed = False - for inst in self.affiliated_institutions: - if inst._id == inst_id: - self.affiliated_institutions.remove(inst) - removed = True - return removed - - _affiliated_institutions = fields.ForeignField('node', list=True) - - @property - def affiliated_institutions(self): - from website.institutions.model import Institution, AffiliatedInstitutionsList - return AffiliatedInstitutionsList([Institution(inst) for inst in self._affiliated_institutions], obj=self, private_target='_affiliated_institutions') - - def get_node_comment_timestamps(self, target_id): - """ Returns the timestamp for when comments were last viewed on a node, file or wiki. - """ - default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0) - return self.comments_viewed_timestamp.get(target_id, default_timestamp) - - def _merge_into_reversed(*iterables): '''Merge multiple sorted inputs into a single output in reverse order. 
''' diff --git a/framework/auth/decorators.py b/framework/auth/decorators.py index f15d915ab364..07d60073ffdb 100644 --- a/framework/auth/decorators.py +++ b/framework/auth/decorators.py @@ -11,7 +11,6 @@ from framework.flask import redirect from framework.exceptions import HTTPError from .core import Auth -from .core import User # TODO [CAS-10][OSF-7566]: implement long-term fix for URL preview/prefetch @@ -44,8 +43,9 @@ def must_be_confirmed(func): @functools.wraps(func) def wrapped(*args, **kwargs): + from osf.models import OSFUser - user = User.load(kwargs['uid']) + user = OSFUser.load(kwargs['uid']) if user is not None: if user.is_confirmed: return func(*args, **kwargs) diff --git a/framework/auth/utils.py b/framework/auth/utils.py index 9352ecbee760..9ee3fdb29d67 100644 --- a/framework/auth/utils.py +++ b/framework/auth/utils.py @@ -88,9 +88,9 @@ def privacy_info_handle(info, anonymous, name=False): def ensure_external_identity_uniqueness(provider, identity, user=None): - from framework.auth.core import User # avoid circular import + from osf.models import OSFUser - users_with_identity = User.find(Q('external_identity.{}.{}'.format(provider, identity), 'ne', None)) + users_with_identity = OSFUser.find(Q('external_identity.{}.{}'.format(provider, identity), 'ne', None)) for existing_user in users_with_identity: if user and user._id == existing_user._id: continue diff --git a/framework/auth/views.py b/framework/auth/views.py index 826493898c6b..58755373fe9d 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -27,9 +27,8 @@ from framework.flask import redirect # VOL-aware redirect from framework.sessions.utils import remove_sessions_for_user, remove_session from framework.sessions import get_session - +from osf.models import OSFUser as User from website import settings, mails, language -from website.models import User from website.util import web_url_for from website.util.time import throttle_period_expired from website.util.sanitize import 
strip_html diff --git a/framework/guid/model.py b/framework/guid/model.py deleted file mode 100644 index 7cac5a761f1d..000000000000 --- a/framework/guid/model.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -import random - -import pymongo -from modularodm import fields - -from framework.mongo import StoredObject - -from modularodm.storage.base import KeyExistsException - -ALPHABET = '23456789abcdefghjkmnpqrstuvwxyz' - - -class BlacklistGuid(StoredObject): - - _id = fields.StringField(primary=True) - - -class Guid(StoredObject): - - __indices__ = [{ - 'unique': False, - 'key_or_list': [('referent.$', pymongo.ASCENDING)] # Forces a mulitkey index h/t @icereval - }] - - _id = fields.StringField(primary=True) - referent = fields.AbstractForeignField() - - @classmethod - def generate(self, referent=None, min_length=5): - while True: - # Create GUID - guid_id = ''.join([random.choice(ALPHABET) for _ in range(min_length)]) - - # Check GUID against blacklist - blacklist_guid = BlacklistGuid.load(guid_id) - if not blacklist_guid: - try: - guid = Guid(_id=guid_id) - guid.save() - break - except KeyExistsException: - pass - if referent: - guid.referent = referent - guid.save() - return guid - - def __repr__(self): - return ''.format(self._id, self.referent._primary_key, self.referent._name) - - -class GuidStoredObject(StoredObject): - """Subclass of `StoredObject` that provisions a `Guid` for each new instance - on save. When saving a `GuidStoredObject` for the first time, creates a new - `Guid`, then assigns the primary key of the instance to the primary key of - the `Guid`. Note: Subclasses should have a `StringField` primary key, since - the key generated by the associated `Guid` will also be a string. - """ - __guid_min_length__ = 5 - - @property - def deep_url(self): - return None - - def _ensure_guid(self): - """Create GUID record if current record doesn't already have one, then - point GUID to self. 
- """ - # Create GUID with specified ID if provided - if self._primary_key: - - # Done if GUID already exists - guid = Guid.load(self._primary_key) - if guid is not None: - return - - # Create GUID - guid = Guid( - _id=self._primary_key, - referent=self, - ) - guid.save() - - # Else create GUID optimistically - else: - guid = Guid.generate(min_length=self.__guid_min_length__) - guid.referent = (guid._primary_key, self._name) - guid.save() - # Set primary key to GUID key - self._primary_key = guid._primary_key - - def save(self, *args, **kwargs): - """Ensure GUID on save.""" - self._ensure_guid() - return super(GuidStoredObject, self).save(*args, **kwargs) - - def __str__(self): - return str(self._id) diff --git a/framework/sessions/__init__.py b/framework/sessions/__init__.py index bb2d737124b3..a92f02278cd3 100644 --- a/framework/sessions/__init__.py +++ b/framework/sessions/__init__.py @@ -4,9 +4,9 @@ import urllib import urlparse +from django.apps import apps from django.utils import timezone from django.db.models import Q -from django.apps import apps import bson.objectid import itsdangerous from flask import request @@ -15,7 +15,6 @@ from werkzeug.local import LocalProxy from framework.flask import redirect -from framework.sessions.model import Session from framework.sessions.utils import remove_session from website import settings @@ -77,6 +76,7 @@ def prepare_private_key(): def get_session(): + Session = apps.get_model('osf.Session') user_session = sessions.get(request._get_current_object()) if not user_session: user_session = Session() @@ -89,6 +89,7 @@ def set_session(session): def create_session(response, data=None): + Session = apps.get_model('osf.Session') current_session = get_session() if current_session: current_session.data.update(data or {}) @@ -117,6 +118,7 @@ def before_request(): from framework.auth.core import get_user from framework.auth import cas from website.util import time as util_time + Session = apps.get_model('osf.Session') # Central 
Authentication Server Ticket Validation and Authentication ticket = request.args.get('ticket') diff --git a/framework/sessions/model.py b/framework/sessions/model.py deleted file mode 100644 index ccda7f9de4f9..000000000000 --- a/framework/sessions/model.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- - -from bson import ObjectId -from modularodm import fields - -from framework.mongo import StoredObject - - -class Session(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - date_created = fields.DateTimeField(auto_now_add=True) - date_modified = fields.DateTimeField(auto_now=True) - data = fields.DictionaryField() - - @property - def is_authenticated(self): - return 'auth_user_id' in self.data - - @property - def is_external_first_login(self): - return 'auth_user_external_first_login' in self.data diff --git a/framework/sessions/utils.py b/framework/sessions/utils.py index b326b64e2798..c76f51a4141e 100644 --- a/framework/sessions/utils.py +++ b/framework/sessions/utils.py @@ -1,9 +1,6 @@ # -*- coding: utf-8 -*- - from modularodm import Q -from framework.sessions.model import Session - def remove_sessions_for_user(user): """ @@ -12,6 +9,7 @@ def remove_sessions_for_user(user): :param user: User :return: """ + from osf.models import Session Session.remove(Q('data.auth_user_id', 'eq', user._id)) @@ -23,4 +21,5 @@ def remove_session(session): :param session: Session :return: """ + from osf.models import Session Session.remove_one(session) diff --git a/osf/management/commands/populate_custom_taxonomies.py b/osf/management/commands/populate_custom_taxonomies.py index 0d45fb981db3..eddc438c4637 100644 --- a/osf/management/commands/populate_custom_taxonomies.py +++ b/osf/management/commands/populate_custom_taxonomies.py @@ -8,7 +8,7 @@ from osf.models import PreprintProvider, PreprintService, Subject from scripts import utils as script_utils -from website.project.taxonomies import validate_subject_hierarchy +from 
osf.models.validators import validate_subject_hierarchy logger = logging.getLogger(__name__) diff --git a/osf/models/__init__.py b/osf/models/__init__.py index c6ecee3f0a85..eaf2369a4286 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -10,7 +10,7 @@ from osf.models.nodelog import NodeLog # noqa from osf.models.tag import Tag # noqa from osf.models.comment import Comment # noqa -from osf.models.conference import Conference # noqa +from osf.models.conference import Conference, MailRecord # noqa from osf.models.citation import AlternativeCitation, CitationStyle # noqa from osf.models.archive import ArchiveJob, ArchiveTarget # noqa from osf.models.queued_mail import QueuedMail # noqa @@ -19,6 +19,7 @@ from osf.models.licenses import NodeLicense, NodeLicenseRecord # noqa from osf.models.private_link import PrivateLink # noqa from osf.models.notifications import NotificationDigest, NotificationSubscription # noqa +from osf.models.spam import SpamStatus, SpamMixin # noqa from osf.models.subject import Subject # noqa from osf.models.preprint_provider import PreprintProvider # noqa from osf.models.preprint_service import PreprintService # noqa diff --git a/osf/models/external.py b/osf/models/external.py index 1418de1abd9f..69a6763f8617 100644 --- a/osf/models/external.py +++ b/osf/models/external.py @@ -460,3 +460,41 @@ def has_expired_credentials(self): if self.expiry_time and self.account.expires_at: return (timezone.now() - self.account.expires_at).total_seconds() > self.expiry_time return False + +class BasicAuthProviderMixin(object): + """ + Providers utilizing BasicAuth can utilize this class to implement the + storage providers framework by subclassing this mixin. This provides + a translation between the oauth parameters and the BasicAuth parameters. + + The password here is kept decrypted by default. 
+ """ + + def __init__(self, account=None, host=None, username=None, password=None): + super(BasicAuthProviderMixin, self).__init__() + if account: + self.account = account + elif not account and host and password and username: + self.account = ExternalAccount( + display_name=username, + oauth_key=password, + oauth_secret=host.lower(), + provider_id='{}:{}'.format(host.lower(), username), + profile_url=host.lower(), + provider=self.short_name, + provider_name=self.name + ) + else: + self.account = None + + @property + def host(self): + return self.account.profile_url + + @property + def username(self): + return self.account.display_name + + @property + def password(self): + return self.account.oauth_key diff --git a/osf/models/licenses.py b/osf/models/licenses.py index 5bfac77178c4..bdb141a6974f 100644 --- a/osf/models/licenses.py +++ b/osf/models/licenses.py @@ -8,11 +8,11 @@ def _serialize(fields, instance): return { - field: getattr(instance, field) + field: getattr(instance, field if field != 'id' else 'license_id') for field in fields } -serialize_node_license = functools.partial(_serialize, ('_id', 'name', 'text')) +serialize_node_license = functools.partial(_serialize, ('id', 'name', 'text')) def serialize_node_license_record(node_license_record): if node_license_record is None: diff --git a/osf/models/preprint_service.py b/osf/models/preprint_service.py index 17354d0f5efd..91046a35f95e 100644 --- a/osf/models/preprint_service.py +++ b/osf/models/preprint_service.py @@ -8,12 +8,11 @@ from framework.celery_tasks.handlers import enqueue_task from framework.exceptions import PermissionsError -from osf.models.subject import Subject +from osf.models import NodeLog, Subject +from osf.models.validators import validate_subject_hierarchy from osf.utils.fields import NonNaiveDateTimeField from website.preprints.tasks import on_preprint_updated -from website.project.model import NodeLog from website.project.licenses import set_license -from website.project.taxonomies 
import validate_subject_hierarchy from website.util import api_v2_url from website.util.permissions import ADMIN from website import settings diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index 6b69d611815d..002f4cf5fbf2 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -491,7 +491,7 @@ def _email_template_context(self, return context def _on_reject(self, user): - from website.project.model import NodeLog + NodeLog = apps.get_model('osf.NodeLog') parent_registration = self._get_registration() parent_registration.registered_from.add_log( @@ -811,7 +811,7 @@ def _on_complete(self, user): self.save() def _on_reject(self, user): - from website.project.model import NodeLog + NodeLog = apps.get_model('osf.NodeLog') register = self._get_registration() registered_from = register.registered_from diff --git a/osf/models/spam.py b/osf/models/spam.py index 6a673ab9396f..8e25f5e627c3 100644 --- a/osf/models/spam.py +++ b/osf/models/spam.py @@ -8,7 +8,6 @@ from osf.utils.fields import NonNaiveDateTimeField from website import settings -from website.project.model import User from website.util import akismet from website.util.akismet import AkismetClientError @@ -24,8 +23,9 @@ def _get_client(): def _validate_reports(value, *args, **kwargs): + from osf.models import OSFUser for key, val in value.iteritems(): - if not User.load(key): + if not OSFUser.load(key): raise ValidationValueError('Keys must be user IDs') if not isinstance(val, dict): raise ValidationTypeError('Values must be dictionaries') diff --git a/osf/models/validators.py b/osf/models/validators.py index ddf6359413b4..119d8ea211cf 100644 --- a/osf/models/validators.py +++ b/osf/models/validators.py @@ -102,6 +102,35 @@ def validate_subject_provider_mapping(provider, mapping): if not mapping and provider._id != 'osf': raise DjangoValidationError('Invalid PreprintProvider / Subject alias mapping.') +def validate_subject_hierarchy(subject_hierarchy): + from osf.models import Subject + 
validated_hierarchy, raw_hierarchy = [], set(subject_hierarchy) + for subject_id in subject_hierarchy: + subject = Subject.load(subject_id) + if not subject: + raise ValidationValueError('Subject with id <{}> could not be found.'.format(subject_id)) + + if subject.parent: + continue + + raw_hierarchy.remove(subject_id) + validated_hierarchy.append(subject._id) + + while raw_hierarchy: + if not set(subject.children.values_list('_id', flat=True)) & raw_hierarchy: + raise ValidationValueError('Invalid subject hierarchy: {}'.format(subject_hierarchy)) + else: + for child in subject.children.filter(_id__in=raw_hierarchy): + subject = child + validated_hierarchy.append(child._id) + raw_hierarchy.remove(child._id) + break + if set(validated_hierarchy) == set(subject_hierarchy): + return + else: + raise ValidationValueError('Invalid subject hierarchy: {}'.format(subject_hierarchy)) + raise ValidationValueError('Unable to find root subject in {}'.format(subject_hierarchy)) + @deconstructible class CommentMaxLength(object): diff --git a/osf_tests/conftest.py b/osf_tests/conftest.py index 4340002fcbcf..801dcc2b298a 100644 --- a/osf_tests/conftest.py +++ b/osf_tests/conftest.py @@ -6,7 +6,7 @@ from framework.django.handlers import handlers as django_handlers from framework.flask import rm_handlers from website import settings -from website.app import init_app, patch_models +from website.app import init_app from website.project.signals import contributor_added from website.project.views.contributor import notify_added_contributor @@ -33,11 +33,6 @@ logging.getLogger(logger_name).setLevel(logging.CRITICAL) -@pytest.fixture(autouse=True, scope='session') -def patched_models(): - patch_models(settings) - - # NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings @pytest.fixture(autouse=True, scope='session') def app(): diff --git a/osf_tests/test_analytics.py b/osf_tests/test_analytics.py index 56766ef47b30..3293de2f0b1d 100644 --- a/osf_tests/test_analytics.py +++ 
b/osf_tests/test_analytics.py @@ -14,7 +14,7 @@ from framework import analytics, sessions from framework.sessions import session -from osf.models import PageCounter +from osf.models import PageCounter, Session from tests.base import OsfTestCase from osf_tests.factories import UserFactory, ProjectFactory @@ -51,7 +51,7 @@ def setUp(self): self.ctx = decoratorapp.test_request_context() self.ctx.push() # TODO: Think of something better @sloria @jmcarp - sessions.set_session(sessions.Session()) + sessions.set_session(Session()) def tearDown(self): self.ctx.pop() diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index af386b057ffd..30d4f661485c 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -43,7 +43,7 @@ from website import settings from website.util import waterbutler_url_for from website.util.sanitize import strip_html -from website.project.model import MetaSchema +from osf.models import MetaSchema from addons.base.models import BaseStorageAddon from osf_tests import factories @@ -1150,14 +1150,14 @@ def error(*args, **kwargs): class TestArchiverBehavior(OsfTestCase): - @mock.patch('website.project.model.Node.update_search') + @mock.patch('osf.models.AbstractNode.update_search') def test_archiving_registrations_not_added_to_search_before_archival(self, mock_update_search): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) reg.save() assert_false(mock_update_search.called) - @mock.patch('website.project.model.Node.update_search') + @mock.patch('osf.models.AbstractNode.update_search') @mock.patch('website.mails.send_mail') @mock.patch('website.archiver.tasks.archive_success.delay') def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock_update_search, mock_send, mock_archive_success): @@ -1184,13 +1184,13 @@ def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, listeners.archive_callback(reg) assert_true(mock_delete_index_node.called) - 
@mock.patch('website.project.model.Node.update_search') + @mock.patch('osf.models.AbstractNode.update_search') @mock.patch('website.mails.send_mail') def test_archiving_nodes_not_added_to_search_on_archive_incomplete(self, mock_send, mock_update_search): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) reg.save() - with mock.patch('website.archiver.model.ArchiveJob.archive_tree_finished', mock.Mock(return_value=False)): + with mock.patch('osf.models.ArchiveJob.archive_tree_finished', mock.Mock(return_value=False)): listeners.archive_callback(reg) assert_false(mock_update_search.called) diff --git a/osf_tests/test_elastic_search.py b/osf_tests/test_elastic_search.py index 7dc39a6c3f63..2df5252ad9c4 100644 --- a/osf_tests/test_elastic_search.py +++ b/osf_tests/test_elastic_search.py @@ -17,7 +17,7 @@ from website.search import elastic_search from website.search.util import build_query from website.search_migration.migrate import migrate -from website.models import Retraction, NodeLicense, Tag +from osf.models import Retraction, NodeLicense, Tag from addons.osfstorage.models import OsfStorageFile from scripts.populate_institutions import main as populate_institutions diff --git a/osf_tests/test_guid_annotations.py b/osf_tests/test_guid_annotations.py index c1b9828072d1..bb009ffcbcd6 100644 --- a/osf_tests/test_guid_annotations.py +++ b/osf_tests/test_guid_annotations.py @@ -4,7 +4,7 @@ from bulk_update.helper import bulk_update from osf.models import OSFUser -from django.db.models import CharField, Max, DateTimeField +from django.db.models import Max, DateTimeField from osf_tests.factories import UserFactory, PreprintFactory diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index 9a2591a8c098..3fabb9a8ee5b 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -1805,13 +1805,13 @@ def test_set_privacy(self, node, auth): assert node.logs.first().action == NodeLog.MADE_PRIVATE assert node.keenio_read_key == '' - 
@mock.patch('website.mails.queue_mail') + @mock.patch('osf.models.queued_mail.queue_mail') def test_set_privacy_sends_mail_default(self, mock_queue, node, auth): node.set_privacy('private', auth=auth) node.set_privacy('public', auth=auth) assert mock_queue.call_count == 1 - @mock.patch('website.mails.queue_mail') + @mock.patch('osf.models.queued_mail.queue_mail') def test_set_privacy_sends_mail(self, mock_queue, node, auth): node.set_privacy('private', auth=auth) node.set_privacy('public', auth=auth, meeting_creation=False) @@ -1884,8 +1884,8 @@ def test_check_spam_only_public_node_by_default(self, project, user): @mock.patch.object(settings, 'SPAM_CHECK_ENABLED', True) def test_check_spam_skips_ham_user(self, project, user): - with mock.patch('website.project.model.Node._get_spam_content', mock.Mock(return_value='some content!')): - with mock.patch('website.project.model.Node.do_check_spam', mock.Mock(side_effect=Exception('should not get here'))): + with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): + with mock.patch('osf.models.AbstractNode.do_check_spam', mock.Mock(side_effect=Exception('should not get here'))): user.add_system_tag('ham_confirmed') project.set_privacy('public') assert project.check_spam(user, None, None) is False @@ -1900,14 +1900,14 @@ def test_check_spam_on_private_node(self, project, user): project.set_privacy('private') assert project.check_spam(user, None, None) is True - @mock.patch('website.project.model.mails.send_mail') + @mock.patch('osf.models.node.mails.send_mail') @mock.patch.object(settings, 'SPAM_CHECK_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, project, user): project.is_public = False project.save() - with mock.patch('website.project.model.Node._get_spam_content', mock.Mock(return_value='some content!')): - with 
mock.patch('website.project.model.Node.do_check_spam', mock.Mock(return_value=True)): + with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): + with mock.patch('osf.models.AbstractNode.do_check_spam', mock.Mock(return_value=True)): user.date_confirmed = timezone.now() project.set_privacy('public') user2 = UserFactory() @@ -1928,14 +1928,14 @@ def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, pro project3.reload() assert project3.is_public is True - @mock.patch('website.project.model.mails.send_mail') + @mock.patch('osf.models.node.mails.send_mail') @mock.patch.object(settings, 'SPAM_CHECK_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) def test_check_spam_on_private_node_does_not_ban_existing_user(self, mock_send_mail, project, user): project.is_public = False project.save() - with mock.patch('website.project.model.Node._get_spam_content', mock.Mock(return_value='some content!')): - with mock.patch('website.project.model.Node.do_check_spam', mock.Mock(return_value=True)): + with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): + with mock.patch('osf.models.AbstractNode.do_check_spam', mock.Mock(return_value=True)): project.creator.date_confirmed = timezone.now() - datetime.timedelta(days=9001) project.set_privacy('public') assert project.check_spam(user, None, None) is True diff --git a/osf_tests/test_prereg.py b/osf_tests/test_prereg.py index 8a8a34593029..797b171ed8e8 100644 --- a/osf_tests/test_prereg.py +++ b/osf_tests/test_prereg.py @@ -2,9 +2,10 @@ from modularodm import Q +from osf.models import MetaSchema from website.prereg import prereg_landing_page as landing_page from website.prereg.utils import drafts_for_user, get_prereg_schema -from website.project.model import ensure_schemas, MetaSchema +from website.project.model import ensure_schemas from tests.base import OsfTestCase from osf_tests 
import factories diff --git a/osf_tests/test_private_link.py b/osf_tests/test_private_link.py index 5eff8fc5e371..e883144542f7 100644 --- a/osf_tests/test_private_link.py +++ b/osf_tests/test_private_link.py @@ -2,7 +2,6 @@ from website.project import new_private_link from website.project.model import ensure_schemas -from website.project.views.node import remove_private_link from .factories import PrivateLinkFactory, NodeFactory from osf.models import MetaSchema, DraftRegistration, NodeLog diff --git a/osf_tests/test_project_decorators.py b/osf_tests/test_project_decorators.py index 8412304c7e56..bd0e0b88563b 100644 --- a/osf_tests/test_project_decorators.py +++ b/osf_tests/test_project_decorators.py @@ -4,7 +4,7 @@ from website.project.decorators import must_be_valid_project -from website.project.sanctions import Sanction +from osf.models import Sanction from tests.base import OsfTestCase from osf_tests.factories import ProjectFactory, NodeFactory, RetractionFactory, CollectionFactory, RegistrationFactory diff --git a/osf_tests/test_queued_mail.py b/osf_tests/test_queued_mail.py index e3ce48b82abd..d9171a04a847 100644 --- a/osf_tests/test_queued_mail.py +++ b/osf_tests/test_queued_mail.py @@ -7,8 +7,10 @@ from .factories import UserFactory, NodeFactory -from osf.models.queued_mail import queue_mail -from website import mails +from osf.models.queued_mail import ( + queue_mail, WELCOME_OSF4M, + NO_LOGIN, NO_ADDON, NEW_PUBLIC_PROJECT +) @pytest.fixture() def user(): @@ -30,14 +32,14 @@ def queue_mail(self, mail, user, send_at=None, **kwargs): @mock.patch('osf.models.queued_mail.send_mail') def test_no_login_presend_for_active_user(self, mock_mail, user): - mail = self.queue_mail(mail=mails.NO_LOGIN, user=user) + mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() + dt.timedelta(seconds=10) user.save() assert mail.send_mail() is False @mock.patch('osf.models.queued_mail.send_mail') def test_no_login_presend_for_inactive_user(self, 
mock_mail, user): - mail = self.queue_mail(mail=mails.NO_LOGIN, user=user) + mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() - dt.timedelta(weeks=10) user.save() assert timezone.now() - dt.timedelta(days=1) > user.date_last_login @@ -45,13 +47,13 @@ def test_no_login_presend_for_inactive_user(self, mock_mail, user): @mock.patch('osf.models.queued_mail.send_mail') def test_no_addon_presend(self, mock_mail, user): - mail = self.queue_mail(mail=mails.NO_ADDON, user=user) + mail = self.queue_mail(mail=NO_ADDON, user=user) assert mail.send_mail() is True @mock.patch('osf.models.queued_mail.send_mail') def test_new_public_project_presend_for_no_project(self, mock_mail, user): mail = self.queue_mail( - mail=mails.NEW_PUBLIC_PROJECT, + mail=NEW_PUBLIC_PROJECT, user=user, project_title='Oh noes', nid='', @@ -62,32 +64,31 @@ def test_new_public_project_presend_for_no_project(self, mock_mail, user): def test_new_public_project_presend_success(self, mock_mail, user): node = NodeFactory(is_public=True) mail = self.queue_mail( - mail=mails.NEW_PUBLIC_PROJECT, + mail=NEW_PUBLIC_PROJECT, user=user, project_title='Oh yass', nid=node._id ) assert bool(mail.send_mail()) is True - # TODO: Uncomment when FileNodeModel is implemented - # @mock.patch('osf.models.queued_mail.send_mail') - # def test_welcome_osf4m_presend(self, mock_mail, user): - # user.date_last_login = timezone.now() - dt.timedelta(days=13) - # user.save() - # mail = self.queue_mail( - # mail=mails.WELCOME_OSF4M, - # user=user, - # conference='Buttjamz conference', - # fid='' - # ) - # assert bool(mail.send_mail()) is True - # assert mail.data['downloads'] == 0 + @mock.patch('osf.models.queued_mail.send_mail') + def test_welcome_osf4m_presend(self, mock_mail, user): + user.date_last_login = timezone.now() - dt.timedelta(days=13) + user.save() + mail = self.queue_mail( + mail=WELCOME_OSF4M, + user=user, + conference='Buttjamz conference', + fid='' + ) + assert bool(mail.send_mail()) 
is True + assert mail.data['downloads'] == 0 @mock.patch('osf.models.queued_mail.send_mail') def test_finding_other_emails_sent_to_user(self, mock_mail, user): mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert len(mail.find_sent_of_same_type_and_user()) == 0 mail.send_mail() @@ -97,7 +98,7 @@ def test_finding_other_emails_sent_to_user(self, mock_mail, user): def test_user_is_active(self, mock_mail, user): mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert bool(mail.send_mail()) is True @@ -108,7 +109,7 @@ def test_user_is_not_active_no_password(self, mock_mail): user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert mail.send_mail() is False @@ -117,7 +118,7 @@ def test_user_is_not_active_not_registered(self, mock_mail): user = UserFactory(is_registered=False) mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert mail.send_mail() is False @@ -127,7 +128,7 @@ def test_user_is_not_active_is_merged(self, mock_mail): user = UserFactory(merged_by=other_user) mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert mail.send_mail() is False @@ -136,7 +137,7 @@ def test_user_is_not_active_is_disabled(self, mock_mail): user = UserFactory(date_disabled=timezone.now()) mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert mail.send_mail() is False @@ -145,6 +146,6 @@ def test_user_is_not_active_is_not_confirmed(self, mock_mail): user = UserFactory(date_confirmed=None) mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert mail.send_mail() is False diff --git a/osf_tests/test_registrations.py b/osf_tests/test_registrations.py index 719e06f1c01a..225396e164d7 100644 --- a/osf_tests/test_registrations.py +++ b/osf_tests/test_registrations.py @@ -21,7 +21,7 @@ @pytest.fixture(autouse=True) -def _ensure_schemas(patched_models): +def _ensure_schemas(): return 
ensure_schemas() diff --git a/osf_tests/test_sanctions.py b/osf_tests/test_sanctions.py index 9ca59f5efbd6..f10b9edaf425 100644 --- a/osf_tests/test_sanctions.py +++ b/osf_tests/test_sanctions.py @@ -7,7 +7,7 @@ from django.utils import timezone from osf.modm_compat import Q -from osf.models import DraftRegistrationApproval, MetaSchema +from osf.models import DraftRegistrationApproval, MetaSchema, NodeLog from osf_tests import factories from osf_tests.utils import mock_archive @@ -15,7 +15,6 @@ from website import settings from website.exceptions import NodeStateError -from website.project.model import NodeLog from website.project.model import ensure_schemas diff --git a/scripts/add_conference.py b/scripts/add_conference.py index 69f18ed0890d..37ffd7cb8ea5 100644 --- a/scripts/add_conference.py +++ b/scripts/add_conference.py @@ -2,7 +2,7 @@ # encoding: utf-8 from website.conferences.model import Conference -from website.models import User +from osf.models import OSFUser as User from modularodm import Q from modularodm.exceptions import ModularOdmException from website.app import init_app diff --git a/scripts/analytics/addon_snapshot.py b/scripts/analytics/addon_snapshot.py index 1e644cdc7b6b..5d23d56ca7e4 100644 --- a/scripts/analytics/addon_snapshot.py +++ b/scripts/analytics/addon_snapshot.py @@ -4,9 +4,10 @@ from modularodm import Q from website.app import init_app -from website.models import Node, User +from osf.models import OSFUser as User, AbstractNode as Node from framework.mongo.utils import paginated from scripts.analytics.base import SnapshotAnalytics +from website.settings import ADDONS_AVAILABLE logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -30,12 +31,12 @@ def get_enabled_authorized_linked(user_settings_list, has_external_account, shor # osfstorage and wiki don't have user_settings, so always assume they're enabled, authorized, linked if short_name == 'osfstorage' or short_name == 'wiki': num_enabled = num_authorized 
= num_linked = User.find( - Q('is_registered', 'eq', True) & - Q('password', 'ne', None) & - Q('merged_by', 'eq', None) & - Q('date_disabled', 'eq', None) & - Q('date_confirmed', 'ne', None) - ).count() + Q('is_registered', 'eq', True) & + Q('password', 'ne', None) & + Q('merged_by', 'eq', None) & + Q('date_disabled', 'eq', None) & + Q('date_confirmed', 'ne', None) + ).count() elif short_name == 'forward': num_enabled = num_authorized = ForwardNodeSettings.find().count() @@ -71,8 +72,6 @@ def collection_name(self): def get_events(self, date=None): super(AddonSnapshot, self).get_events(date) - from addons.base.models import BaseNodeSettings - from website.settings import ADDONS_AVAILABLE counts = [] addons_available = {k: v for k, v in [(addon.short_name, addon) for addon in ADDONS_AVAILABLE]} diff --git a/scripts/analytics/institution_summary.py b/scripts/analytics/institution_summary.py index b83d62fae1b6..7a8db782051c 100644 --- a/scripts/analytics/institution_summary.py +++ b/scripts/analytics/institution_summary.py @@ -4,8 +4,8 @@ from dateutil.parser import parse from datetime import datetime, timedelta +from osf.models import OSFUser as User, AbstractNode as Node, Institution from website.app import init_app -from website.models import User, Node, Institution from scripts.analytics.base import SummaryAnalytics @@ -49,7 +49,7 @@ def get_events(self, date): project_public_query = project_query & public_query project_private_query = project_query & private_query count = { - 'institution':{ + 'institution': { 'id': institution._id, 'name': institution.name, }, @@ -57,7 +57,7 @@ def get_events(self, date): 'total': User.find(user_query).count(), }, 'nodes': { - 'total':AbstractNode.find_by_institutions(institution, node_query).count(), + 'total': AbstractNode.find_by_institutions(institution, node_query).count(), 'public': AbstractNode.find_by_institutions(institution, node_public_query).count(), 'private': AbstractNode.find_by_institutions(institution, 
node_private_query).count(), }, diff --git a/scripts/analytics/node_log_events.py b/scripts/analytics/node_log_events.py index 42f854c85321..20e2531d0095 100644 --- a/scripts/analytics/node_log_events.py +++ b/scripts/analytics/node_log_events.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta from website.app import init_app -from website.project.model import NodeLog +from osf.models import NodeLog from framework.mongo.utils import paginated from scripts.analytics.base import EventAnalytics diff --git a/scripts/analytics/user_domain_events.py b/scripts/analytics/user_domain_events.py index 2c5f1a8dc90a..3b56cce2a2e4 100644 --- a/scripts/analytics/user_domain_events.py +++ b/scripts/analytics/user_domain_events.py @@ -4,7 +4,7 @@ from dateutil.parser import parse from datetime import datetime, timedelta -from website.models import User +from osf.models import OSFUser from website.app import init_app from framework.mongo.utils import paginated from scripts.analytics.base import EventAnalytics @@ -34,7 +34,7 @@ def get_events(self, date): user_query = (Q('date_confirmed', 'lt', date + timedelta(1)) & Q('date_confirmed', 'gte', date) & Q('username', 'ne', None)) - users = paginated(User, query=user_query) + users = paginated(OSFUser, query=user_query) user_domain_events = [] for user in users: user_date = user.date_confirmed.replace(tzinfo=pytz.UTC) diff --git a/scripts/analytics/user_summary.py b/scripts/analytics/user_summary.py index 7452fb1d089b..cb724e3f0d9e 100644 --- a/scripts/analytics/user_summary.py +++ b/scripts/analytics/user_summary.py @@ -5,8 +5,8 @@ from modularodm import Q +from osf.models import OSFUser as User, NodeLog from website.app import init_app -from website.models import User, NodeLog from framework.mongo.utils import paginated from scripts.analytics.base import SummaryAnalytics diff --git a/scripts/annotate_rsvps.py b/scripts/annotate_rsvps.py index 61d66b65a6e8..2cf2c6d92be9 100644 --- a/scripts/annotate_rsvps.py +++ 
b/scripts/annotate_rsvps.py @@ -18,7 +18,8 @@ from modularodm import Q from modularodm.exceptions import ModularOdmException -from website.models import User, Node, NodeLog +from osf.models import OSFUser as User +from website.models import Node, NodeLog logging.basicConfig(level=logging.INFO) diff --git a/scripts/approve_embargo_terminations.py b/scripts/approve_embargo_terminations.py index 58c402ea79fe..d266f0401e3c 100644 --- a/scripts/approve_embargo_terminations.py +++ b/scripts/approve_embargo_terminations.py @@ -9,17 +9,18 @@ Makes the Embargoed Node and its components public. """ -import datetime import logging -import sys +import django from django.utils import timezone from django.db import transaction from modularodm import Q +django.setup() from framework.celery_tasks import app as celery_app -from website import models, settings +from osf import models +from website import settings from website.app import init_app from scripts import utils as scripts_utils diff --git a/scripts/approve_registrations.py b/scripts/approve_registrations.py index 9a031e5ccf72..aadf91306d5c 100644 --- a/scripts/approve_registrations.py +++ b/scripts/approve_registrations.py @@ -3,16 +3,18 @@ """ import logging -import datetime +import django from django.utils import timezone from django.db import transaction from modularodm import Q +django.setup() from framework.celery_tasks import app as celery_app +from osf import models from website.app import init_app -from website import models, settings +from website import settings from scripts import utils as scripts_utils @@ -61,4 +63,3 @@ def run_main(dry_run=True): if not dry_run: scripts_utils.add_file_logger(logger, __file__) main(dry_run=dry_run) - diff --git a/scripts/cleanup_failed_registrations.py b/scripts/cleanup_failed_registrations.py index 606d104e3bd5..5f34075a28df 100644 --- a/scripts/cleanup_failed_registrations.py +++ b/scripts/cleanup_failed_registrations.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- import sys 
-from datetime import datetime import logging +import django from django.utils import timezone from modularodm import Q +django.setup() from website.archiver import ( ARCHIVER_INITIATED @@ -16,7 +17,7 @@ ) from website.settings import ARCHIVE_TIMEOUT_TIMEDELTA -from website.archiver.model import ArchiveJob +from osf.models import ArchiveJob from website.app import init_app diff --git a/scripts/embargo_registrations.py b/scripts/embargo_registrations.py index 42c1f23d418e..95ddfebcc152 100644 --- a/scripts/embargo_registrations.py +++ b/scripts/embargo_registrations.py @@ -4,17 +4,18 @@ """ import logging -import datetime +import django from django.utils import timezone from django.db import transaction from modularodm import Q +django.setup() from framework.celery_tasks import app as celery_app from website.app import init_app from website import models, settings -from website.project.model import NodeLog +from osf.models import NodeLog from scripts import utils as scripts_utils diff --git a/scripts/generate_sitemap.py b/scripts/generate_sitemap.py index 2d521f8cd691..f2db7880f41a 100644 --- a/scripts/generate_sitemap.py +++ b/scripts/generate_sitemap.py @@ -4,23 +4,19 @@ import boto3 import datetime import gzip -import math import os import shutil import sys -import urllib import urlparse import xml import django django.setup() -from django.db import transaction import logging from framework import sentry from framework.celery_tasks import app as celery_app -from osf.models import OSFUser, AbstractNode, Registration -from osf.models.preprint_service import PreprintService +from osf.models import OSFUser, AbstractNode, PreprintService from scripts import utils as script_utils from website import settings from website.app import init_app @@ -31,12 +27,12 @@ class Progress(object): def __init__(self, bar_len=50): self.bar_len = bar_len - + def start(self, total, prefix): self.total = total self.count = 0 self.prefix = prefix - + def increment(self, inc=1): self.count 
+= inc filled_len = int(round(self.bar_len * self.count / float(self.total))) @@ -136,7 +132,7 @@ def write_sitemap_index(self): for f in range(self.sitemap_count): sitemap = doc.createElement('sitemap') sitemap_index.appendChild(sitemap) - + loc = doc.createElement('loc') sitemap.appendChild(loc) loc_text = self.doc.createTextNode(urlparse.urljoin(settings.DOMAIN, 'sitemaps/sitemap_{}.xml.gz'.format(str(f)))) @@ -194,7 +190,7 @@ def generate(self): progress.stop() # AbstractNode urls (Nodes and Registrations, no colelctions) - objs = AbstractNode.objects.filter(is_public=True, is_deleted=False, retraction_id__isnull=True).exclude(type="osf.collection") + objs = AbstractNode.objects.filter(is_public=True, is_deleted=False, retraction_id__isnull=True).exclude(type="osf.collection") progress.start(objs.count(), 'NODE: ') for obj in objs.iterator(): try: @@ -221,12 +217,14 @@ def generate(self): # Preprint file urls try: file_config = settings.SITEMAP_PREPRINT_FILE_CONFIG - file_config['loc'] = urlparse.urljoin(settings.DOMAIN, - os.path.join('project', - obj.primary_file.node._id, # Parent node id + file_config['loc'] = urlparse.urljoin( + settings.DOMAIN, + os.path.join( + 'project', + obj.primary_file.node._id, # Parent node id 'files', 'osfstorage', - obj.primary_file._id, # Preprint file deep_url + obj.primary_file._id, # Preprint file deep_url '?action=download' ) ) diff --git a/scripts/osfstorage/files_audit.py b/scripts/osfstorage/files_audit.py index fbd4028af179..b12a7b131884 100644 --- a/scripts/osfstorage/files_audit.py +++ b/scripts/osfstorage/files_audit.py @@ -18,14 +18,13 @@ import pyrax -from modularodm import Q from boto.glacier.layer2 import Layer2 from pyrax.exceptions import NoSuchObject from framework.celery_tasks import app as celery_app from website.app import init_app -from website.files import models +from osf.models import FileVersion from scripts import utils as scripts_utils from scripts.osfstorage import utils as storage_utils @@ -122,12 
+121,12 @@ def ensure_backups(version, dry_run): def glacier_targets(): - return models.FileVersion.objects.filter(location__has_key='object', metadata__archive__isnull=True) + return FileVersion.objects.filter(location__has_key='object', metadata__archive__isnull=True) def parity_targets(): # TODO: Add metadata.parity information from wb so we do not need to check remote services - return models.FileVersion.objects.filter(location__has_key='object') + return FileVersion.objects.filter(location__has_key='object') # & metadata__parity__isnull=True diff --git a/scripts/osfstorage/glacier_audit.py b/scripts/osfstorage/glacier_audit.py index b3a2f5b3570d..4b78793f0d4e 100644 --- a/scripts/osfstorage/glacier_audit.py +++ b/scripts/osfstorage/glacier_audit.py @@ -17,7 +17,7 @@ from framework.celery_tasks import app as celery_app from website.app import init_app -from website.files import models +from osf.models import FileVersion from scripts import utils as scripts_utils from scripts.osfstorage import settings as storage_settings @@ -66,7 +66,7 @@ def get_job(vault, job_id=None): def get_targets(date): - return models.FileVersion.find( + return FileVersion.find( Q('date_created', 'lt', date - DELTA_DATE) & Q('status', 'ne', 'cached') & Q('metadata.archive', 'exists', True) & diff --git a/scripts/osfstorage/usage_audit.py b/scripts/osfstorage/usage_audit.py index c011eeeb6b01..d89a188a3626 100644 --- a/scripts/osfstorage/usage_audit.py +++ b/scripts/osfstorage/usage_audit.py @@ -20,16 +20,14 @@ from osf.models import TrashedFile from website import mails -from website.models import User from website.app import init_app -from website.project.model import Node from scripts import utils as scripts_utils # App must be init'd before django models are imported init_app(set_backends=True, routes=False) -from osf.models import StoredFileNode, TrashedFileNode, FileVersion +from osf.models import StoredFileNode, FileVersion, OSFUser as User, AbstractNode as Node logger = 
logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) diff --git a/scripts/populate_conferences.py b/scripts/populate_conferences.py index 9c251c25896c..ee573f4f8f54 100644 --- a/scripts/populate_conferences.py +++ b/scripts/populate_conferences.py @@ -4,14 +4,15 @@ import os import sys +import django from modularodm import Q from modularodm.exceptions import ModularOdmException +django.setup() -from framework.auth.core import User +from osf.models import Conference, OSFUser as User from website import settings from website.app import init_app -from website.conferences.model import Conference from datetime import datetime diff --git a/scripts/populate_institutions.py b/scripts/populate_institutions.py index b89e6909f622..1c9c6ed253bd 100644 --- a/scripts/populate_institutions.py +++ b/scripts/populate_institutions.py @@ -13,7 +13,7 @@ from website import settings from website.app import init_app -from website.models import Institution, Node +from osf.models import Institution, Node from website.search.search import update_institution, update_node logger = logging.getLogger(__name__) diff --git a/scripts/populate_preprint_providers.py b/scripts/populate_preprint_providers.py index 8be4fd8d7c84..38ae528d98a0 100644 --- a/scripts/populate_preprint_providers.py +++ b/scripts/populate_preprint_providers.py @@ -13,7 +13,7 @@ import django django.setup() -from website.models import Subject, PreprintProvider, NodeLicense +from osf.models import Subject, PreprintProvider, NodeLicense logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) diff --git a/scripts/refresh_addon_tokens.py b/scripts/refresh_addon_tokens.py index 9c1cc8623a1a..fd7f235554c9 100644 --- a/scripts/refresh_addon_tokens.py +++ b/scripts/refresh_addon_tokens.py @@ -6,9 +6,11 @@ import time from django.utils import timezone +import django from modularodm import Q from oauthlib.oauth2 import OAuth2Error from dateutil.relativedelta import relativedelta +django.setup() from 
framework.celery_tasks import app as celery_app @@ -18,7 +20,7 @@ from addons.box.models import Provider as Box from addons.googledrive.models import GoogleDriveProvider from addons.mendeley.models import Mendeley -from website.oauth.models import ExternalAccount +from osf.models import ExternalAccount logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) diff --git a/scripts/retract_registrations.py b/scripts/retract_registrations.py index 1cb6aea95ba4..9dcc5401f3a8 100644 --- a/scripts/retract_registrations.py +++ b/scripts/retract_registrations.py @@ -2,16 +2,18 @@ import logging +import django from django.db import transaction from django.utils import timezone from modularodm import Q +django.setup() from framework.auth import Auth from framework.celery_tasks import app as celery_app from website.app import init_app from website import models, settings -from website.project.model import NodeLog +from osf.models import NodeLog from scripts import utils as scripts_utils diff --git a/scripts/send_queued_mails.py b/scripts/send_queued_mails.py index d519718a7c89..a91f4c42c976 100644 --- a/scripts/send_queued_mails.py +++ b/scripts/send_queued_mails.py @@ -1,13 +1,16 @@ import logging +import django from django.db import transaction from django.utils import timezone from modularodm import Q +django.setup() from framework.celery_tasks import app as celery_app +from osf.models.queued_mail import QueuedMail from website.app import init_app -from website import mails, settings +from website import settings from scripts.utils import add_file_logger @@ -46,7 +49,7 @@ def main(dry_run=True): def find_queued_mails_ready_to_be_sent(): - return mails.QueuedMail.find( + return QueuedMail.find( Q('send_at', 'lt', timezone.now()) & Q('sent_at', 'eq', None) ) @@ -55,7 +58,7 @@ def find_queued_mails_ready_to_be_sent(): def pop_and_verify_mails_for_each_user(user_queue): for user_emails in user_queue.values(): mail = user_emails[0] - mails_past_week = 
mails.QueuedMail.find( + mails_past_week = QueuedMail.find( Q('user', 'eq', mail.user) & Q('sent_at', 'gt', timezone.now() - settings.WAIT_BETWEEN_MAILS) ) diff --git a/scripts/triggered_mails.py b/scripts/triggered_mails.py index f22f9a9391ba..5ff6e33612ab 100644 --- a/scripts/triggered_mails.py +++ b/scripts/triggered_mails.py @@ -4,11 +4,11 @@ from django.utils import timezone from modularodm import Q -from framework.auth import User from framework.celery_tasks import app as celery_app - +from osf.models import OSFUser as User +from osf.models.queued_mail import NO_LOGIN_TYPE, NO_LOGIN, QueuedMail, queue_mail from website.app import init_app -from website import mails, settings +from website import settings from scripts.utils import add_file_logger @@ -23,9 +23,9 @@ def main(dry_run=True): logger.warn('Email of type no_login queued to {0}'.format(user.username)) if not dry_run: with transaction.atomic(): - mails.queue_mail( + queue_mail( to_addr=user.username, - mail=mails.NO_LOGIN, + mail=NO_LOGIN, send_at=timezone.now(), user=user, fullname=user.fullname, @@ -37,7 +37,7 @@ def find_inactive_users_with_no_inactivity_email_sent_or_queued(): (Q('date_last_login', 'lt', timezone.now() - settings.NO_LOGIN_WAIT_TIME) & Q('tags__name', 'ne', 'osf4m')) | (Q('date_last_login', 'lt', timezone.now() - settings.NO_LOGIN_OSF4M_WAIT_TIME) & Q('tags__name', 'eq', 'osf4m')) ) - inactive_emails = mails.QueuedMail.find(Q('email_type', 'eq', mails.NO_LOGIN_TYPE)) + inactive_emails = QueuedMail.find(Q('email_type', 'eq', NO_LOGIN_TYPE)) #This is done to prevent User query returns comparison to User, as equality fails #on datetime fields due to pymongo rounding. Instead here _id is compared. 
@@ -53,4 +53,3 @@ def run_main(dry_run=True): if not dry_run: add_file_logger(logger, __file__) main(dry_run=dry_run) - diff --git a/tests/base.py b/tests/base.py index 321e1b24ad18..3a0faf5588ef 100644 --- a/tests/base.py +++ b/tests/base.py @@ -4,10 +4,7 @@ import datetime as dt import functools import logging -import os import re -import shutil -import tempfile import unittest import uuid @@ -20,23 +17,21 @@ from django.test.utils import override_settings from django.test import TestCase as DjangoTestCase from faker import Factory -from framework.auth import User from framework.auth.core import Auth from framework.celery_tasks.handlers import celery_before_request from framework.django.handlers import handlers as django_handlers from framework.flask import rm_handlers -from framework.guid.model import Guid from framework.mongo import client as client_proxy from framework.mongo import database as database_proxy -from framework.sessions.model import Session from framework.transactions import commands, messages, utils +from osf.models import OSFUser as User, Guid, Session +from osf.models import MetaSchema, AbstractNode as Node, NodeLog, Tag from pymongo.errors import OperationFailure from website import settings from website.app import init_app from website.notifications.listeners import (subscribe_contributor, subscribe_creator) -from website.project.model import (MetaSchema, Node, NodeLog, Tag, WatchConfig, - ensure_schemas) +from website.project.model import ensure_schemas from website.project.signals import contributor_added, project_created from website.project.views.contributor import notify_added_contributor from website.signals import ALL_SIGNALS @@ -94,7 +89,7 @@ def get_default_metaschema(): # All Models MODELS = (User, Node, NodeLog, NodeWikiPage, - Tag, WatchConfig, Session, Guid) + Tag, Session, Guid) def teardown_database(client=None, database=None): diff --git a/tests/factories.py b/tests/factories.py index 8c4263138a15..f2fcbda2c8fa 100644 --- 
a/tests/factories.py +++ b/tests/factories.py @@ -13,7 +13,6 @@ Factory boy docs: http://factoryboy.readthedocs.org/ """ -import datetime import functools from django.utils import timezone @@ -23,41 +22,27 @@ from modularodm import Q from modularodm.exceptions import NoResultsFound -from framework.auth import User, Auth +from framework.auth import Auth from framework.auth.utils import impute_names_model, impute_names -from framework.guid.model import Guid from framework.mongo import StoredObject -from framework.sessions.model import Session from tests.base import fake from tests.base import get_default_metaschema from tests import mock_addons as addons_base from addons.wiki.models import NodeWikiPage -from website.oauth.models import ( - ApiOAuth2Application, - ApiOAuth2PersonalToken, - ExternalAccount, - ExternalProvider -) -from website.preprints.model import PreprintProvider, PreprintService -from website.project.model import ( - Comment, DraftRegistration, MetaSchema, Node, NodeLog, Pointer, - PrivateLink, Tag, WatchConfig, AlternativeCitation, - ensure_schemas, Institution -) -from website.project.sanctions import ( - Embargo, - RegistrationApproval, - Retraction, - Sanction, -) -from website.project.taxonomies import Subject -from website.notifications.model import NotificationSubscription, NotificationDigest -from website.archiver.model import ArchiveTarget, ArchiveJob -from website.identifiers.model import Identifier +from website.project.model import ensure_schemas +from addons.osfstorage.models import OsfStorageFile +from osf.models import (Subject, NotificationSubscription, NotificationDigest, + ArchiveJob, ArchiveTarget, Identifier, NodeLicense, + NodeLicenseRecord, Embargo, RegistrationApproval, + Retraction, Sanction, Comment, DraftRegistration, + MetaSchema, AbstractNode as Node, NodeLog, + PrivateLink, Tag, AlternativeCitation, Institution, + ApiOAuth2PersonalToken, ApiOAuth2Application, ExternalAccount, + ExternalProvider, OSFUser as User, 
PreprintService, + PreprintProvider, Session, Guid) from website.archiver import ARCHIVER_SUCCESS -from website.project.licenses import NodeLicense, NodeLicenseRecord, ensure_licenses +from website.project.licenses import ensure_licenses from website.util import permissions -from website.files.models.osfstorage import OsfStorageFile from website.exceptions import InvalidSanctionApprovalToken ensure_licenses = functools.partial(ensure_licenses, warn=False) @@ -437,12 +422,6 @@ def _create(cls, *args, **kwargs): return fork -class PointerFactory(ModularOdmFactory): - class Meta: - model = Pointer - node = SubFactory(NodeFactory) - - class NodeLogFactory(ModularOdmFactory): class Meta: model = NodeLog @@ -450,12 +429,6 @@ class Meta: user = SubFactory(UserFactory) -class WatchConfigFactory(ModularOdmFactory): - class Meta: - model = WatchConfig - node = SubFactory(NodeFactory) - - class SanctionFactory(ModularOdmFactory): class Meta: abstract = True diff --git a/tests/framework_tests/test_sentry.py b/tests/framework_tests/test_sentry.py index aed08b0d51ce..0635b0aadf4d 100644 --- a/tests/framework_tests/test_sentry.py +++ b/tests/framework_tests/test_sentry.py @@ -10,9 +10,8 @@ import functools from framework import sentry -from framework.sessions import Session, set_session - -from website import settings +from framework.sessions import set_session +from osf.models import Session def set_sentry(status): @@ -29,8 +28,6 @@ def wrapped(*args, **kwargs): with_sentry = set_sentry(True) without_sentry = set_sentry(False) - - @with_sentry @mock.patch('framework.sentry.sentry.captureException') def test_log_no_request_context(mock_capture): diff --git a/tests/mock_addons.py b/tests/mock_addons.py index ce44617162d2..dafe7de8cca3 100644 --- a/tests/mock_addons.py +++ b/tests/mock_addons.py @@ -19,7 +19,7 @@ from website import settings from addons.base import serializer, logger -from website.project.model import Node, User +from osf.models import OSFUser as User, AbstractNode 
as Node from website.util import waterbutler_url_for from website.oauth.signals import oauth_complete diff --git a/tests/test_addons.py b/tests/test_addons.py index e5a1d816bc17..2d6d46fb5f3e 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -13,7 +13,6 @@ from framework.auth import cas, signing from framework.auth.core import Auth from framework.exceptions import HTTPError -from framework.sessions.model import Session from modularodm import Q from nose.tools import * # noqa from osf_tests import factories @@ -25,10 +24,11 @@ from addons.github.exceptions import ApiError from addons.github.models import GithubFolder, GithubFile, GithubFileNode from addons.github.tests.factories import GitHubAccountFactory +from osf.models import Session, MetaSchema from osf.models import files as file_models from osf.models.files import StoredFileNode, TrashedFileNode from website.project import new_private_link -from website.project.model import MetaSchema, ensure_schemas +from website.project.model import ensure_schemas from website.project.views.node import _view_project as serialize_node from website.util import api_url_for, rubeus diff --git a/tests/test_alternative_citations.py b/tests/test_alternative_citations.py index ffc08874e617..5b30c6461a60 100644 --- a/tests/test_alternative_citations.py +++ b/tests/test_alternative_citations.py @@ -3,7 +3,7 @@ from framework.auth.core import Auth -from website.project.model import AlternativeCitation +from osf.models import AlternativeCitation from modularodm.exceptions import ValidationError diff --git a/tests/test_auth.py b/tests/test_auth.py index ad4e2dd53a7f..ec403f9a82d4 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -14,7 +14,6 @@ from framework import auth from framework.auth import cas from framework.auth.utils import validate_recaptcha -from framework.sessions import Session from framework.exceptions import HTTPError from tests.base import OsfTestCase, assert_is_redirect, fake from 
osf_tests.factories import ( @@ -22,9 +21,9 @@ ProjectFactory, NodeFactory, AuthUserFactory, PrivateLinkFactory ) -from framework.auth import User, Auth +from framework.auth import Auth from framework.auth.decorators import must_be_logged_in - +from osf.models import OSFUser as User, Session from website import mails from website import settings from website.util import permissions @@ -248,7 +247,7 @@ def test_repr(self): def test_factory(self): auth_obj = AuthFactory() - assert_true(isinstance(auth_obj.user, auth.User)) + assert_true(isinstance(auth_obj.user, User)) def test_from_kwargs(self): user = UserFactory() diff --git a/tests/test_citations.py b/tests/test_citations.py index 0d3260126e48..1ab9850596c9 100644 --- a/tests/test_citations.py +++ b/tests/test_citations.py @@ -1,18 +1,14 @@ # -*- coding: utf-8 -*- - -import datetime - import pytest from django.utils import timezone -from flask import redirect from nose.tools import * # noqa from framework.auth.core import Auth from osf_tests.factories import AuthUserFactory, ProjectFactory, UserFactory from scripts import parse_citation_styles from tests.base import OsfTestCase +from osf.models import OSFUser as User, AbstractNode as Node from website.citations.utils import datetime_to_csl -from website.models import Node, User from website.util import api_url_for pytestmark = pytest.mark.django_db diff --git a/tests/test_conferences.py b/tests/test_conferences.py index 13491865e77c..1cd16ad58e30 100644 --- a/tests/test_conferences.py +++ b/tests/test_conferences.py @@ -15,8 +15,8 @@ from framework.auth import get_or_create_user from framework.auth.core import Auth +from osf.models import OSFUser as User, AbstractNode as Node from website import settings -from website.models import User, Node from website.conferences import views from website.conferences import utils, message from website.util import api_url_for, web_url_for diff --git a/tests/test_identifiers.py b/tests/test_identifiers.py index 
1b9e3c6966c4..dae38862146e 100644 --- a/tests/test_identifiers.py +++ b/tests/test_identifiers.py @@ -14,9 +14,9 @@ import furl import lxml.etree +from osf.models import Identifier from website import settings from website.identifiers.utils import to_anvl -from website.identifiers.model import Identifier from website.identifiers.metadata import datacite_metadata_for_node from website.identifiers import metadata diff --git a/tests/test_mails.py b/tests/test_mails.py index 66cfb88ef37e..7426e6dca3c9 100644 --- a/tests/test_mails.py +++ b/tests/test_mails.py @@ -1,11 +1,15 @@ # -*- coding: utf-8 -*- import mock -from datetime import datetime, timedelta +from datetime import timedelta from django.utils import timezone from nose.tools import * # PEP 8 sserts -from website import mails, settings +from website import mails +from osf.models.queued_mail import ( + queue_mail, WELCOME_OSF4M, + NO_LOGIN, NO_ADDON, NEW_PUBLIC_PROJECT +) from osf_tests import factories from tests.base import OsfTestCase @@ -29,7 +33,7 @@ def setUp(self): self.user.save() def queue_mail(self, mail, user=None, send_at=None, **kwargs): - mail = mails.queue_mail( + mail = queue_mail( to_addr=user.username if user else self.user.username, send_at=send_at or timezone.now(), user=user or self.user, @@ -39,73 +43,73 @@ def queue_mail(self, mail, user=None, send_at=None, **kwargs): ) return mail - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_no_login_presend_for_active_user(self, mock_mail): user = factories.AuthUserFactory() - mail = self.queue_mail(mail=mails.NO_LOGIN, user=user) + mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() + timedelta(seconds=10) user.save() assert_false(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_no_login_presend_for_inactive_user(self, mock_mail): user = factories.AuthUserFactory() - mail 
= self.queue_mail(mail=mails.NO_LOGIN, user=user) + mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() - timedelta(weeks=10) user.save() assert_true(timezone.now() - timedelta(days=1) > user.date_last_login) assert_true(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_no_addon_presend(self, mock_mail): - mail = self.queue_mail(mail=mails.NO_ADDON) + mail = self.queue_mail(mail=NO_ADDON) assert_true(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_new_public_project_presend_for_no_project(self, mock_mail): mail = self.queue_mail( - mail=mails.NEW_PUBLIC_PROJECT, + mail=NEW_PUBLIC_PROJECT, project_title='Oh noes', nid='', ) assert_false(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_new_public_project_presend_success(self, mock_mail): node = factories.ProjectFactory() node.is_public = True node.save() mail = self.queue_mail( - mail=mails.NEW_PUBLIC_PROJECT, + mail=NEW_PUBLIC_PROJECT, project_title='Oh yass', nid=node._id ) assert_true(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_welcome_osf4m_presend(self, mock_mail): self.user.date_last_login = timezone.now() - timedelta(days=13) self.user.save() mail = self.queue_mail( - mail=mails.WELCOME_OSF4M, + mail=WELCOME_OSF4M, conference='Buttjamz conference', fid='' ) assert_true(mail.send_mail()) assert_equal(mail.data['downloads'], 0) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_finding_other_emails_sent_to_user(self, mock_mail): user = factories.UserFactory() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) 
assert_equal(len(mail.find_sent_of_same_type_and_user()), 0) mail.send_mail() assert_equal(len(mail.find_sent_of_same_type_and_user()), 1) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_user_is_active(self, mock_mail): user = factories.UserFactory() user.set_password('myprecious') @@ -116,33 +120,33 @@ def test_user_is_active(self, mock_mail): user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert_true(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_user_is_not_active_no_password(self, mock_mail): user = factories.UserFactory() user.set_unusable_password() user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert_false(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_user_is_not_active_not_registered(self, mock_mail): user = factories.UserFactory() user.is_registered = False user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert_false(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_user_is_not_active_is_merged(self, mock_mail): user = factories.UserFactory() other_user = factories.UserFactory() @@ -150,28 +154,28 @@ def test_user_is_not_active_is_merged(self, mock_mail): user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert_false(mail.send_mail()) - @mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_user_is_not_active_is_disabled(self, mock_mail): user = factories.UserFactory() user.date_disabled = timezone.now() user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert_false(mail.send_mail()) - 
@mock.patch('website.mails.queued_mails.send_mail') + @mock.patch('osf.models.queued_mail.send_mail') def test_user_is_not_active_is_not_confirmed(self, mock_mail): user = factories.UserFactory() user.date_confirmed = None user.save() mail = self.queue_mail( user=user, - mail=mails.NO_ADDON, + mail=NO_ADDON, ) assert_false(mail.send_mail()) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 20d533018769..85f1543031dd 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -7,7 +7,7 @@ from modularodm.exceptions import ValidationError from modularodm import Q -from website.project.model import MetaSchema +from osf.models import MetaSchema from website.project.model import ensure_schemas from website.project.metadata.schemas import OSF_META_SCHEMAS diff --git a/tests/test_node_licenses.py b/tests/test_node_licenses.py index 6bfb0c2b4abf..25c2aea45802 100644 --- a/tests/test_node_licenses.py +++ b/tests/test_node_licenses.py @@ -15,10 +15,10 @@ from tests.base import OsfTestCase from tests.utils import assert_logs, assert_not_logs from website import settings -from website.project.licenses import (NodeLicense, ensure_licenses, - serialize_node_license, - serialize_node_license_record) -from website.project.model import NodeLog, NodeStateError +from osf.models.licenses import NodeLicense, serialize_node_license_record, serialize_node_license +from website.project.licenses import ensure_licenses +from osf.models import NodeLog +from website.exceptions import NodeStateError ensure_licenses = functools.partial(ensure_licenses, warn=False) diff --git a/tests/test_notifications.py b/tests/test_notifications.py index 726929032c4c..480861a12645 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -1,7 +1,5 @@ import collections -import datetime import mock -import pytz from babel import dates, Locale from schema import Schema, And, Use, Or from django.utils import timezone @@ -11,7 +9,7 @@ from nose.tools import * # noqa 
PEP8 asserts from framework.auth import Auth -from osf.models import Node, Comment, NotificationDigest, NotificationSubscription, Guid, OSFUser +from osf.models import Comment, NotificationDigest, NotificationSubscription, Guid, OSFUser from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications from website.notifications import constants diff --git a/tests/test_oauth.py b/tests/test_oauth.py index 335a37c0451d..3abe567af3a4 100644 --- a/tests/test_oauth.py +++ b/tests/test_oauth.py @@ -13,12 +13,7 @@ from framework.auth import authenticate from framework.exceptions import PermissionsError, HTTPError from framework.sessions import session -from website.oauth.models import ( - ExternalAccount, - ExternalProvider, - OAUTH1, - OAUTH2, -) +from osf.models.external import ExternalAccount, ExternalProvider, OAUTH1, OAUTH2 from website.util import api_url_for, web_url_for from tests.base import OsfTestCase diff --git a/tests/test_preprints.py b/tests/test_preprints.py index f0af7eb7da26..41c9190d0475 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -4,7 +4,7 @@ import urlparse from framework.celery_tasks import handlers -from website.files.models.osfstorage import OsfStorageFile +from addons.osfstorage.models import OsfStorageFile from website.preprints.tasks import format_preprint from website.util import permissions diff --git a/tests/test_registrations/base.py b/tests/test_registrations/base.py index fa0853510d28..98d1a312a2a3 100644 --- a/tests/test_registrations/base.py +++ b/tests/test_registrations/base.py @@ -6,7 +6,7 @@ from framework.auth import Auth from website.util import permissions -from website.models import MetaSchema +from osf.models import MetaSchema from website.project.model import ensure_schemas from tests.base import OsfTestCase diff --git a/tests/test_registrations/test_approvals.py b/tests/test_registrations/test_approvals.py index f13b6c6e6338..ff3399fdee83 100644 --- 
a/tests/test_registrations/test_approvals.py +++ b/tests/test_registrations/test_approvals.py @@ -6,15 +6,10 @@ from nose.tools import * # noqa from tests.base import OsfTestCase from osf_tests.factories import ( - AuthUserFactory, EmbargoFactory, NodeFactory, ProjectFactory, - RegistrationFactory, UserFactory, UnconfirmedUserFactory, DraftRegistrationFactory + EmbargoFactory, ProjectFactory, + RegistrationFactory, UserFactory, DraftRegistrationFactory ) -from framework.exceptions import PermissionsError -from modularodm.exceptions import ValidationValueError -from website.exceptions import ( - InvalidSanctionRejectionToken, InvalidSanctionApprovalToken, NodeStateError, -) from website import tokens diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index 9d5f3feba2de..e2efa81842dc 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -24,12 +24,11 @@ InvalidSanctionRejectionToken, InvalidSanctionApprovalToken, NodeStateError, ) from website import tokens -from website.models import Embargo, Node, User +from osf.models import AbstractNode as Node from website.project.model import ensure_schemas from osf.models.sanctions import PreregCallbackMixin, Embargo from website.util import permissions -from website.project.spam.model import SpamStatus -from osf.models import Registration, Contributor +from osf.models import Registration, Contributor, OSFUser as User, SpamStatus DUMMY_TOKEN = tokens.encode({ 'dummy': 'token' diff --git a/tests/test_registrations/test_registration_approvals.py b/tests/test_registrations/test_registration_approvals.py index c15f46e60caf..fb949f4930ab 100644 --- a/tests/test_registrations/test_registration_approvals.py +++ b/tests/test_registrations/test_registration_approvals.py @@ -4,7 +4,6 @@ from django.utils import timezone from nose.tools import * # noqa from tests.base import fake, OsfTestCase -from website.project.spam.model import 
SpamStatus from osf_tests.factories import ( EmbargoFactory, NodeFactory, ProjectFactory, RegistrationFactory, UserFactory, UnconfirmedUserFactory @@ -21,7 +20,7 @@ RegistrationApproval, ) from framework.auth import Auth -from osf.models import Contributor +from osf.models import Contributor, SpamStatus DUMMY_TOKEN = tokens.encode({ diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 0d7db2e94645..165fe205a876 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -23,8 +23,7 @@ InvalidSanctionApprovalToken, InvalidSanctionRejectionToken, NodeStateError, ) -from website.models import Retraction -from osf.models import Contributor +from osf.models import Contributor, Retraction class RegistrationRetractionModelsTestCase(OsfTestCase): diff --git a/tests/test_registrations/test_views.py b/tests/test_registrations/test_views.py index 239d42828165..8c3299d28ff1 100644 --- a/tests/test_registrations/test_views.py +++ b/tests/test_registrations/test_views.py @@ -14,7 +14,7 @@ from framework.exceptions import HTTPError -from website.models import Node, MetaSchema, DraftRegistration +from osf.models import MetaSchema, DraftRegistration from website.project.metadata.schemas import ACTIVE_META_SCHEMAS, _name_to_id from website.util import permissions, api_url_for from website.project.views import drafts as draft_views @@ -144,7 +144,7 @@ def test_submit_draft_for_review_non_admin(self): ) assert_equal(res.status_code, http.FORBIDDEN) - @mock.patch('website.project.model.DraftRegistration.register', autospec=True) + @mock.patch('osf.models.DraftRegistration.register', autospec=True) def test_register_draft_registration(self, mock_register_draft): url = self.node.api_url_for('register_draft_registration', draft_id=self.draft._id) @@ -550,7 +550,7 @@ def test_check_draft_state_registered_and_deleted_and_approved(self): reg.is_deleted = True reg.save() - with 
mock.patch('website.project.model.DraftRegistration.is_approved', mock.PropertyMock(return_value=True)): + with mock.patch('osf.models.DraftRegistration.is_approved', mock.PropertyMock(return_value=True)): try: draft_views.check_draft_state(self.draft) except HTTPError: diff --git a/tests/test_serializers.py b/tests/test_serializers.py index 8a79368f0e04..261422b3b60b 100644 --- a/tests/test_serializers.py +++ b/tests/test_serializers.py @@ -11,13 +11,11 @@ RegistrationFactory, NodeFactory, CollectionFactory, - BookmarkCollectionFactory, ) from osf.models import NodeRelation from tests.base import OsfTestCase, get_default_metaschema from framework.auth import Auth -from framework import utils as framework_utils from website.project.views.node import _view_project, _serialize_node_search, _get_children, _get_readable_descendants from website.profile.views import get_public_projects, get_public_components from website.views import serialize_node_summary diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index 3755f7f81f53..50e4c9554fb2 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -1,5 +1,4 @@ from __future__ import absolute_import -from datetime import datetime from django.utils import timezone from nose.tools import * # noqa PEP8 asserts @@ -9,7 +8,7 @@ from tests.base import OsfTestCase from osf_tests.factories import UserFactory, CommentFactory -from website.project.spam.model import SpamStatus +from osf.models import SpamStatus class TestSpamMixin(OsfTestCase): @@ -23,7 +22,7 @@ def test_report_abuse(self): user = UserFactory() time = timezone.now() self.comment.report_abuse( - user, date=time, category='spam', text='ads', save=True) + user, date=time, category='spam', text='ads', save=True) assert_equal(self.comment.spam_status, SpamStatus.FLAGGED) equivalent = dict( date=time, @@ -47,7 +46,7 @@ def test_retract_report(self): user = UserFactory() time = timezone.now() self.comment.report_abuse( - user, date=time, 
category='spam', text='ads', save=True + user, date=time, category='spam', text='ads', save=True ) assert_equal(self.comment.spam_status, SpamStatus.FLAGGED) self.comment.retract_report(user, save=True) @@ -65,7 +64,7 @@ def test_retract_report_not_reporter(self): reporter = UserFactory() non_reporter = UserFactory() self.comment.report_abuse( - reporter, category='spam', text='ads', save=True + reporter, category='spam', text='ads', save=True ) with assert_raises(ValueError): self.comment.retract_report(non_reporter, save=True) @@ -76,11 +75,11 @@ def test_retract_one_report_of_many(self): user_2 = UserFactory() time = timezone.now() self.comment.report_abuse( - user_1, date=time, category='spam', text='ads', save=True + user_1, date=time, category='spam', text='ads', save=True ) assert_equal(self.comment.spam_status, SpamStatus.FLAGGED) self.comment.report_abuse( - user_2, date=time, category='spam', text='all', save=True + user_2, date=time, category='spam', text='all', save=True ) self.comment.retract_report(user_1, save=True) equivalent = { @@ -101,7 +100,7 @@ def test_flag_spam(self): def test_cannot_remove_flag_not_retracted(self): user = UserFactory() self.comment.report_abuse( - user, category='spam', text='ads', save=True + user, category='spam', text='ads', save=True ) self.comment.remove_flag(save=True) assert_equal(self.comment.spam_status, SpamStatus.FLAGGED) diff --git a/tests/test_subjects.py b/tests/test_subjects.py index 54f08b832bc6..bd9970d149d8 100644 --- a/tests/test_subjects.py +++ b/tests/test_subjects.py @@ -6,7 +6,7 @@ from tests.base import OsfTestCase from osf_tests.factories import SubjectFactory, PreprintFactory, PreprintProviderFactory -from website.project.taxonomies import validate_subject_hierarchy +from osf.models.validators import validate_subject_hierarchy class TestSubjectTreeValidation(OsfTestCase): diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 259f55eb416a..5f996226a4ad 100644 --- 
a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -7,7 +7,7 @@ from modularodm import Q from framework.auth import Auth -from website.models import Node, NodeLog +from osf.models import AbstractNode as Node, NodeLog from tests.base import OsfTestCase from tests.factories import ProjectFactory diff --git a/tests/test_tokens.py b/tests/test_tokens.py index e3d1fd8ee1ce..8c3405aa1cd3 100644 --- a/tests/test_tokens.py +++ b/tests/test_tokens.py @@ -13,8 +13,7 @@ from framework.exceptions import HTTPError from website import settings -from website.models import Node -from website.project.sanctions import Embargo, RegistrationApproval, Retraction, Sanction +from osf.models import AbstractNode as Node, Embargo, RegistrationApproval, Retraction, Sanction from website.tokens import decode, encode, TokenHandler from website.tokens.exceptions import TokenHandlerNotFound diff --git a/tests/test_views.py b/tests/test_views.py index f1cc3f2b2fe4..dee95c28e9f8 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -8,7 +8,6 @@ import httplib as http import json import time -import pytz import unittest from flask import request @@ -29,7 +28,6 @@ from framework.auth.campaigns import get_campaigns, is_institution_login, is_native_login, is_proxy_login, campaign_url_for from framework.auth import Auth from framework.auth.cas import get_login_url -from framework.auth.core import generate_verification_key from framework.auth.exceptions import InvalidTokenError from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness from framework.auth.views import login_and_register_handler @@ -40,7 +38,7 @@ from website import mailchimp_utils from website import mails, settings from addons.osfstorage import settings as osfstorage_settings -from website.models import Node, NodeLog, Pointer +from osf.models import AbstractNode as Node, NodeLog from website.profile.utils import add_contributor_json, serialize_unregistered from website.profile.views 
import fmt_date_or_none, update_osf_help_mails_subscription from website.project.decorators import check_can_access @@ -2464,7 +2462,7 @@ def test_invalid_claim_form_raise_400(self): res = self.app.get(url, expect_errors=True).maybe_follow() assert_equal(res.status_code, 400) - @mock.patch('framework.auth.core.User.update_search_nodes') + @mock.patch('osf.models.OSFUser.update_search_nodes') def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): url = self.user.get_claim_url(self.project._primary_key) res = self.app.post(url, { @@ -2485,7 +2483,7 @@ def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): assert_true(self.user.is_active) assert_not_in(self.project._primary_key, self.user.unclaimed_records) - @mock.patch('framework.auth.core.User.update_search_nodes') + @mock.patch('osf.models.OSFUser.update_search_nodes') def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes): # user has multiple unclaimed records p2 = ProjectFactory(creator=self.referrer) @@ -2502,7 +2500,7 @@ def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_sear self.user.reload() assert_equal(self.user.unclaimed_records, {}) - @mock.patch('framework.auth.core.User.update_search_nodes') + @mock.patch('osf.models.OSFUser.update_search_nodes') def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes): # User is created with a full name original_name = fake.name() @@ -2940,11 +2938,9 @@ def test_remove_pointer_not_found(self): def test_remove_pointer_not_in_nodes(self): url = self.project.api_url + 'pointer/' - node = NodeFactory() - pointer = Pointer() res = self.app.delete_json( url, - {'pointerId': pointer._id}, + {'pointerId': 'somefakeid'}, auth=self.user.auth, expect_errors=True ) @@ -2978,11 +2974,9 @@ def test_fork_pointer_not_found(self): def test_fork_pointer_not_in_nodes(self): url = self.project.api_url + 'pointer/fork/' - node = NodeFactory() - 
pointer = Pointer() res = self.app.post_json( url, - {'pointerId': pointer._id}, + {'pointerId': 'somefakeid'}, auth=self.user.auth, expect_errors=True ) @@ -3222,7 +3216,7 @@ def test_register_bad_captcha(self, _, validate_recaptcha): ) assert_equal(resp.status_code, http.BAD_REQUEST) - @mock.patch('framework.auth.core.User.update_search_nodes') + @mock.patch('osf.models.OSFUser.update_search_nodes') def test_register_after_being_invited_as_unreg_contributor(self, mock_update_search_nodes): # Regression test for: # https://github.com/CenterForOpenScience/openscienceframework.org/issues/861 diff --git a/tests/test_websitefiles.py b/tests/test_websitefiles.py index 6abfb2dcda67..2ac0b8ab3b99 100644 --- a/tests/test_websitefiles.py +++ b/tests/test_websitefiles.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals import mock -import pytest from django.utils import timezone from modularodm import Q from nose.tools import * # noqa @@ -9,13 +8,11 @@ from addons.osfstorage.models import OsfStorageFile, OsfStorageFolder, OsfStorageFileNode from addons.s3.models import S3File from osf.models import File -from osf.models import FileNode from osf.models import Folder from osf.models.files import BaseFileNode from tests.base import OsfTestCase from tests.factories import AuthUserFactory, ProjectFactory from website.files import exceptions -from website.files import utils from osf import models @@ -532,7 +529,7 @@ def test_update_version_metadata(self): v1.refresh_from_db() assert_equal(v1.size, 1337) - @mock.patch('website.files.models.base.requests.get') + @mock.patch('osf.models.files.requests.get') def test_touch(self, mock_requests): file = TestFile( _path='/afile', @@ -561,7 +558,7 @@ def test_touch(self, mock_requests): assert_equals(v.size, 0xDEADBEEF) assert_equals(file.versions.count(), 0) - @mock.patch('website.files.models.base.requests.get') + @mock.patch('osf.models.files.requests.get') def test_touch_caching(self, mock_requests): 
file = TestFile( _path='/afile', @@ -588,7 +585,7 @@ def test_touch_caching(self, mock_requests): assert_equals(file.versions.count(), 1) assert_equals(file.touch(None, revision='foo'), v) - @mock.patch('website.files.models.base.requests.get') + @mock.patch('osf.models.files.requests.get') def test_touch_auth(self, mock_requests): file = TestFile( _path='/afile', diff --git a/tests/test_webtests.py b/tests/test_webtests.py index 2c3f9e806443..ded543dc8283 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -23,7 +23,7 @@ from addons.wiki.tests.factories import NodeWikiFactory from osf.models import AbstractNode as Node from website import settings, language -from website.files.models.osfstorage import OsfStorageFile +from addons.osfstorage.models import OsfStorageFile from website.util import web_url_for, api_url_for, permissions from api_tests import utils as test_utils diff --git a/tests/utils.py b/tests/utils.py index e1a9c76c34e6..873e030f55a6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -13,7 +13,7 @@ from website.archiver import ARCHIVER_SUCCESS from website.archiver import listeners as archiver_listeners -from website.project.sanctions import Sanction +from osf.models import Sanction from tests.base import get_default_metaschema diff --git a/website/app.py b/website/app.py index 56876669c648..1e532eee6519 100644 --- a/website/app.py +++ b/website/app.py @@ -3,16 +3,13 @@ import framework import importlib -import itertools import json import logging import os -import sys import thread from collections import OrderedDict import django -import modularodm from api.caching import listeners # noqa from django.apps import apps from framework.addons.utils import render_addon_capabilities @@ -27,7 +24,6 @@ from framework.transactions import handlers as transaction_handlers # Imports necessary to connect signals from website.archiver import listeners # noqa -from website.files.models import FileNode from website.mails import listeners # noqa 
from website.notifications import listeners # noqa from website.project.licenses import ensure_licenses @@ -114,8 +110,6 @@ def init_app(settings_module='website.settings', set_backends=True, routes=True, with open(os.path.join(settings.STATIC_FOLDER, 'built', 'nodeCategories.json'), 'wb') as fp: json.dump(settings.NODE_CATEGORY_MAP, fp) - patch_models(settings) - app.debug = settings.DEBUG_MODE # default config for flask app, however, this does not affect setting cookie using set_cookie() @@ -154,41 +148,3 @@ def apply_middlewares(flask_app, settings): flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app) return flask_app - -def _get_models_to_patch(): - """Return all models from OSF and addons.""" - return list( - itertools.chain( - *[ - app_config.get_models(include_auto_created=False) - for app_config in apps.get_app_configs() - if app_config.label == 'osf' or app_config.label.startswith('addons_') - ] - ) - ) - -# TODO: This won't work for modules that do e.g. `from website import models`. Rethink. 
-def patch_models(settings): - if not settings.USE_POSTGRES: - return - from osf import models - model_map = { - models.OSFUser: 'User', - models.AbstractNode: 'Node', - models.NodeRelation: 'Pointer', - models.BaseFileNode: 'StoredFileNode', - } - for module in sys.modules.values(): - if not module: - continue - for model_cls in _get_models_to_patch(): - model_name = model_map.get(model_cls, model_cls._meta.model.__name__) - if ( - hasattr(module, model_name) and - isinstance(getattr(module, model_name), type) and - (issubclass(getattr(module, model_name), modularodm.StoredObject) or issubclass(getattr(module, model_name), FileNode)) - ): - setattr(module, model_name, model_cls) - # Institution is a special case because it isn't a StoredObject - if hasattr(module, 'Institution') and getattr(module, 'Institution') is not models.Institution: - setattr(module, 'Institution', models.Institution) diff --git a/website/archiver/model.py b/website/archiver/model.py deleted file mode 100644 index d50cebd8012d..000000000000 --- a/website/archiver/model.py +++ /dev/null @@ -1,185 +0,0 @@ -import datetime - -from modularodm import fields - -from framework.mongo import ObjectId -from framework.mongo import StoredObject - -from website.archiver import ( - ARCHIVER_INITIATED, - ARCHIVER_SUCCESS, - ARCHIVER_FAILURE, - ARCHIVER_FAILURE_STATUSES -) - -from addons.base.models import BaseStorageAddon -from website import settings - - -class ArchiveTarget(StoredObject): - """Stores the results of archiving a single addon - """ - - _id = fields.StringField( - primary=True, - default=lambda: str(ObjectId()) - ) - - # addon_short_name of target addon - name = fields.StringField() - - status = fields.StringField(default=ARCHIVER_INITIATED) - # representation of a website.archiver.AggregateStatResult - # Format: { - # 'target_id': , - # 'target_name': , - # 'targets': (StatResult | AggregateStatResult), - # 'num_files': , - # 'disk_usage': , - # } - stat_result = fields.DictionaryField() - 
errors = fields.StringField(list=True) - - def __repr__(self): - return '<{0}(_id={1}, name={2}, status={3})>'.format( - self.__class__.__name__, - self._id, - self.name, - self.status - ) - - -class ArchiveJob(StoredObject): - - _id = fields.StringField( - primary=True, - default=lambda: str(ObjectId()) - ) - - # whether or not the ArchiveJob is complete (success or fail) - done = fields.BooleanField(default=False) - # whether or not emails have been sent for this ArchiveJob - sent = fields.BooleanField(default=False) - status = fields.StringField(default=ARCHIVER_INITIATED) - datetime_initiated = fields.DateTimeField(default=datetime.datetime.utcnow) - - dst_node = fields.ForeignField('node', backref='active') - src_node = fields.ForeignField('node') - initiator = fields.ForeignField('user') - - target_addons = fields.ForeignField('archivetarget', list=True) - - def __repr__(self): - return ( - '<{ClassName}(_id={self._id}, done={self.done}, ' - ' status={self.status}, src_node={self.src_node}, dst_node={self.dst_node})>' - ).format(ClassName=self.__class__.__name__, self=self) - - @property - def children(self): - return [node.archive_job for node in self.dst_node.nodes if node.primary] - - @property - def parent(self): - parent_node = self.dst_node.parent_node - return parent_node.archive_job if parent_node else None - - @property - def success(self): - return self.status == ARCHIVER_SUCCESS - - @property - def pending(self): - return any([ - target for target in self.target_addons - if target.status not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE) - ]) - - def info(self): - return self.src_node, self.dst_node, self.initiator - - def target_info(self): - return [ - { - 'name': target.name, - 'status': target.status, - 'stat_result': target.stat_result, - 'errors': target.errors - } - for target in self.target_addons - ] - - def archive_tree_finished(self): - if self.pending: - return False - if not self.children: - return True - return all([ - 
child.archive_tree_finished() - for child in self.children - ]) - - def _fail_above(self): - """Marks all ArchiveJob instances attached to Nodes above this as failed - """ - parent = self.parent - if parent: - parent.status = ARCHIVER_FAILURE - parent.save() - - def _post_update_target(self): - """Checks for success or failure if the ArchiveJob on self.dst_node - is finished - """ - if self.status == ARCHIVER_FAILURE: - return - if not self.pending: - self.done = True - if any([target.status for target in self.target_addons if target.status in ARCHIVER_FAILURE_STATUSES]): - self.status = ARCHIVER_FAILURE - self._fail_above() - else: - self.status = ARCHIVER_SUCCESS - self.save() - - def get_target(self, addon_short_name): - try: - return [addon for addon in self.target_addons if addon.name == addon_short_name][0] - except IndexError: - return None - - def _set_target(self, addon_short_name): - if self.get_target(addon_short_name): - return - target = ArchiveTarget(name=addon_short_name) - target.save() - self.target_addons.append(target) - - def set_targets(self): - addons = [] - for addon in [self.src_node.get_addon(name) - for name in settings.ADDONS_ARCHIVABLE - if settings.ADDONS_ARCHIVABLE[name] != 'none']: - if not addon or not addon.complete or not isinstance(addon, BaseStorageAddon): - continue - archive_errors = getattr(addon, 'archive_errors', None) - if not archive_errors or (archive_errors and not archive_errors()): - if addon.config.short_name == 'dataverse': - addons.append(addon.config.short_name + '-draft') - addons.append(addon.config.short_name + '-published') - else: - addons.append(addon.config.short_name) - for addon in addons: - self._set_target(addon) - self.save() - - def update_target(self, addon_short_name, status, stat_result=None, errors=None): - stat_result = stat_result or {} - errors = errors or [] - - target = self.get_target(addon_short_name) - target.status = status - target.errors = errors - target.stat_result = stat_result - 
target.save() - self._post_update_target() diff --git a/website/archiver/utils.py b/website/archiver/utils.py index e2640727f7d4..1516a5669b6d 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -161,7 +161,7 @@ def aggregate_file_tree_metadata(addon_short_name, fileobj_metadata, user): ) def before_archive(node, user): - from website.archiver.model import ArchiveJob + from osf.models import ArchiveJob link_archive_provider(node, user) job = ArchiveJob.objects.create( src_node=node.registered_from, diff --git a/website/conferences/model.py b/website/conferences/model.py deleted file mode 100644 index 27b2eabaf41d..000000000000 --- a/website/conferences/model.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- - -import bson -from modularodm import fields, Q -from modularodm.exceptions import ModularOdmException - -from framework.mongo import StoredObject -from website.conferences.exceptions import ConferenceError - -DEFAULT_FIELD_NAMES = { - 'submission1': 'poster', - 'submission2': 'talk', - 'submission1_plural': 'posters', - 'submission2_plural': 'talks', - 'meeting_title_type': 'Posters & Talks', - 'add_submission': 'poster or talk', - 'mail_subject': 'Presentation title', - 'mail_message_body': 'Presentation abstract (if any)', - 'mail_attachment': 'Your presentation file (e.g., PowerPoint, PDF, etc.)', - 'homepage_link_text': 'Conference homepage', -} - - -class Conference(StoredObject): - #: Determines the email address for submission and the OSF url - # Example: If endpoint is spsp2014, then submission email will be - # spsp2014-talk@osf.io or spsp2014-poster@osf.io and the OSF url will - # be osf.io/view/spsp2014 - endpoint = fields.StringField(primary=True, required=True, unique=True) - #: Full name, e.g. 
"SPSP 2014" - name = fields.StringField(required=True) - info_url = fields.StringField(required=False, default=None) - logo_url = fields.StringField(required=False, default=None) - location = fields.StringField(required=False, default=None) - start_date = fields.DateTimeField(default=None) - end_date = fields.DateTimeField(default=None) - is_meeting = fields.BooleanField(required=True) - active = fields.BooleanField(required=True) - admins = fields.ForeignField('user', list=True, required=False, default=None) - #: Whether to make submitted projects public - public_projects = fields.BooleanField(required=False, default=True) - poster = fields.BooleanField(default=True) - talk = fields.BooleanField(default=True) - # field_names are used to customize the text on the conference page, the categories - # of submissions, and the email adress to send material to. - field_names = fields.DictionaryField(default=lambda: DEFAULT_FIELD_NAMES) - - # Cached number of submissions - num_submissions = fields.IntegerField(default=0) - - def __repr__(self): - return ( - ''.format(self=self) - ) - - @classmethod - def get_by_endpoint(cls, endpoint, active=True): - query = Q('endpoint', 'iexact', endpoint) - if active: - query &= Q('active', 'eq', True) - try: - return Conference.find_one(query) - except ModularOdmException: - raise ConferenceError('Endpoint {0} not found'.format(endpoint)) - - -class MailRecord(StoredObject): - _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId())) - data = fields.DictionaryField() - records = fields.AbstractForeignField(list=True) diff --git a/website/conferences/utils.py b/website/conferences/utils.py index 6057af75f9e0..4c0a5943525e 100644 --- a/website/conferences/utils.py +++ b/website/conferences/utils.py @@ -9,7 +9,7 @@ from website import util from website import settings from website.project import new_node -from website.models import Node, MailRecord +from osf.models import AbstractNode as Node, MailRecord def 
record_message(message, nodes_created, users_created): diff --git a/website/conferences/views.py b/website/conferences/views.py index b7694ef830e3..2e1d19d29ec6 100644 --- a/website/conferences/views.py +++ b/website/conferences/views.py @@ -8,18 +8,17 @@ from modularodm import Q from modularodm.exceptions import ModularOdmException +from addons.osfstorage.models import OsfStorageFile from framework.auth import get_or_create_user from framework.exceptions import HTTPError from framework.flask import redirect from framework.transactions.handlers import no_auto_transaction +from osf.models import AbstractNode as Node, Conference, Tag from website import settings from website.conferences import utils, signals from website.conferences.message import ConferenceMessage, ConferenceError -from website.conferences.model import Conference -from website.files.models import OsfStorageFile from website.mails import CONFERENCE_SUBMITTED, CONFERENCE_INACTIVE, CONFERENCE_FAILED from website.mails import send_mail -from website.models import Node, Tag from website.util import web_url_for logger = logging.getLogger(__name__) diff --git a/website/files/__init__.py b/website/files/__init__.py index f2129c492923..ce824edb7f1c 100644 --- a/website/files/__init__.py +++ b/website/files/__init__.py @@ -1,3 +1 @@ -from website.files import models # noqa - # Note tests are located in both tests.test_websitefiles and website.addons.osfstorage.tests diff --git a/website/files/models/__init__.py b/website/files/models/__init__.py deleted file mode 100644 index c2224695cb10..000000000000 --- a/website/files/models/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from website.files.models.base import * # noqa - -from website.files.models.s3 import * # noqa -from website.files.models.box import * # noqa -from website.files.models.github import * # noqa -from website.files.models.dropbox import * # noqa -from website.files.models.figshare import * # noqa -from website.files.models.dataverse import * # 
noqa -from website.files.models.osfstorage import * # noqa -from website.files.models.owncloud import * # noqa -from website.files.models.googledrive import * # noqa diff --git a/website/files/models/base.py b/website/files/models/base.py deleted file mode 100644 index 35520a15e8dc..000000000000 --- a/website/files/models/base.py +++ /dev/null @@ -1,901 +0,0 @@ -from __future__ import unicode_literals - -import os -import bson -import logging -import pymongo -import requests -import functools - -from django.utils import timezone -from modularodm import fields, Q -from modularodm.exceptions import NoResultsFound -from dateutil.parser import parse as parse_date - -from framework.guid.model import Guid -from framework.mongo import StoredObject -from framework.mongo.utils import unique_on -from framework.analytics import get_basic_counters - -from website import util -from website.files import utils -from website.files import exceptions -from website.project.commentable import Commentable - - -__all__ = ( - 'File', - 'Folder', - 'FileNode', - 'FileVersion', - 'StoredFileNode', - 'TrashedFileNode', -) - - -PROVIDER_MAP = {} -logger = logging.getLogger(__name__) - - -class TrashedFileNode(StoredObject, Commentable): - """The graveyard for all deleted FileNodes""" - - __indices__ = [{ - 'unique': False, - 'key_or_list': [ - ('node', pymongo.ASCENDING), - ('is_file', pymongo.ASCENDING), - ('provider', pymongo.ASCENDING), - ] - }] - - _id = fields.StringField(primary=True) - - last_touched = fields.DateTimeField() - history = fields.DictionaryField(list=True) - versions = fields.ForeignField('FileVersion', list=True) - - node = fields.ForeignField('node', required=True) - parent = fields.AbstractForeignField(default=None) - - is_file = fields.BooleanField(default=True) - provider = fields.StringField(required=True) - - name = fields.StringField(required=True) - path = fields.StringField(required=True) - materialized_path = fields.StringField(required=True) - - checkout = 
fields.AbstractForeignField('User') - deleted_by = fields.AbstractForeignField('User') - deleted_on = fields.DateTimeField(auto_now_add=True) - tags = fields.ForeignField('Tag', list=True) - suspended = fields.BooleanField(default=False) - - copied_from = fields.ForeignField('StoredFileNode', default=None) - - @property - def deep_url(self): - """Allows deleted files to resolve to a view - that will provide a nice error message and http.GONE - """ - return self.node.web_url_for('addon_deleted_file', trashed_id=self._id) - - # For Comment API compatibility - @property - def target_type(self): - """The object "type" used in the OSF v2 API.""" - return 'files' - - @property - def root_target_page(self): - """The comment page type associated with TrashedFileNodes.""" - return 'files' - - @property - def is_deleted(self): - return True - - def belongs_to_node(self, node_id): - """Check whether the file is attached to the specified node.""" - return self.node._id == node_id - - def get_extra_log_params(self, comment): - return {'file': {'name': self.name, 'url': comment.get_comment_page_url()}} - - def restore(self, recursive=True, parent=None): - """Recreate a StoredFileNode from the data in this object - Will re-point all guids and finally remove itself - :raises KeyExistsException: - """ - data = self.to_storage() - data.pop('deleted_on') - data.pop('deleted_by') - data.pop('suspended') - if parent: - data['parent'] = parent._id - elif data['parent']: - # parent is an AbstractForeignField, so it gets stored as tuple - data['parent'] = data['parent'][0] - restored = FileNode.resolve_class(self.provider, int(self.is_file))(**data) - if not restored.parent: - raise ValueError('No parent to restore to') - restored.save() - - # repoint guid - for guid in Guid.find(Q('referent', 'eq', self)): - guid.referent = restored - guid.save() - - if recursive: - for child in TrashedFileNode.find(Q('parent', 'eq', self)): - child.restore(recursive=recursive, parent=restored) - - 
TrashedFileNode.remove_one(self) - return restored - - def get_guid(self): - """Attempt to find a Guid that points to this object. - - :rtype: Guid or None - """ - try: - # Note sometimes multiple GUIDs can exist for - # a single object. Just go with the first one - return Guid.find(Q('referent', 'eq', self))[0] - except IndexError: - return None - -@unique_on(['node', 'name', 'parent', 'is_file', 'provider', 'path']) -class StoredFileNode(StoredObject, Commentable): - """The storage backend for FileNode objects. - This class should generally not be used or created manually as FileNode - contains all the helpers required. - A FileNode wraps a StoredFileNode to provider usable abstraction layer - """ - - __indices__ = [{ - 'unique': False, - 'key_or_list': [ - ('path', pymongo.ASCENDING), - ('node', pymongo.ASCENDING), - ('is_file', pymongo.ASCENDING), - ('provider', pymongo.ASCENDING), - ] - }, { - 'unique': False, - 'key_or_list': [ - ('node', pymongo.ASCENDING), - ('is_file', pymongo.ASCENDING), - ('provider', pymongo.ASCENDING), - ] - }, { - 'unique': False, - 'key_or_list': [ - ('parent', pymongo.ASCENDING), - ] - }] - - _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId())) - - # The last time the touch method was called on this FileNode - last_touched = fields.DateTimeField() - # A list of dictionaries sorted by the 'modified' key - # The raw output of the metadata request deduped by etag - # Add regardless it can be pinned to a version or not - history = fields.DictionaryField(list=True) - # A concrete version of a FileNode, must have an identifier - versions = fields.ForeignField('FileVersion', list=True) - - node = fields.ForeignField('Node', required=True) - parent = fields.ForeignField('StoredFileNode', default=None) - copied_from = fields.ForeignField('StoredFileNode', default=None) - - is_file = fields.BooleanField(default=True) - provider = fields.StringField(required=True) - - name = fields.StringField(required=True) - path = 
fields.StringField(required=True) - materialized_path = fields.StringField(required=True) - - # The User that has this file "checked out" - # Should only be used for OsfStorage - checkout = fields.AbstractForeignField('User') - - #Tags for a file, currently only used for osfStorage - tags = fields.ForeignField('Tag', list=True) - - # For Django compatibility - @property - def pk(self): - return self._id - - # For Django compatibility - # TODO Find a better way - @property - def node_id(self): - return self.node._id - - @property - def deep_url(self): - return self.deep_url - - @property - def absolute_api_v2_url(self): - path = '/files/{}/'.format(self._id) - return util.api_v2_url(path) - - # For Comment API compatibility - @property - def target_type(self): - """The object "type" used in the OSF v2 API.""" - return 'files' - - @property - def root_target_page(self): - """The comment page type associated with StoredFileNodes.""" - return 'files' - - @property - def is_deleted(self): - if self.provider == 'osfstorage': - return False - - def belongs_to_node(self, node_id): - """Check whether the file is attached to the specified node.""" - return self.node._id == node_id - - def get_extra_log_params(self, comment): - return {'file': {'name': self.name, 'url': comment.get_comment_page_url()}} - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - - def wrapped(self): - """Wrap self in a FileNode subclass - """ - return FileNode.resolve_class(self.provider, int(self.is_file))(self) - - def get_guid(self, create=False): - """Attempt to find a Guid that points to this object. - One will be created if requested. - - :param Boolean create: Should we generate a GUID if there isn't one? Default: False - :rtype: Guid or None - """ - try: - # Note sometimes multiple GUIDs can exist for - # a single object. 
Just go with the first one - return Guid.find(Q('referent', 'eq', self))[0] - except IndexError: - if not create: - return None - return Guid.generate(self) - - -class FileNodeMeta(type): - """Keeps track of subclasses of the ``FileNode`` object - Inserts all into the PROVIDER_MAP following the pattern: - { - provider: [ProviderFolder, ProviderFile, ProviderFileNode] - } - """ - - def __init__(cls, name, bases, dct): - super(FileNodeMeta, cls).__init__(name, bases, dct) - if hasattr(cls, 'provider'): - cls_map = PROVIDER_MAP.setdefault(cls.provider, [None, None, None]) - index = int(getattr(cls, 'is_file', 2)) - - if cls_map[index] is not None: - raise ValueError('Conflicting providers') - - cls_map[index] = cls - - -class FileNode(object): - """The base class for the entire files storage system. - Use for querying on all files and folders in the database - Note: This is a proxy object for StoredFileNode - """ - FOLDER, FILE, ANY = 0, 1, 2 - - __metaclass__ = FileNodeMeta - - @classmethod - def create(cls, **kwargs): - """A layer of abstraction around the creation of FileNodes. - Provides hook in points for subclasses - This is used only for GUID creation. - """ - assert hasattr(cls, 'is_file') and hasattr(cls, 'provider'), 'Must have is_file and provider to call create' - kwargs['is_file'] = cls.is_file - kwargs['provider'] = cls.provider - return cls(**kwargs) - - @classmethod - def get_or_create(cls, node, path): - """Tries to find a FileNode with node and path - See FileNode.create - Note: Osfstorage overrides this method due to odd database constraints - """ - path = '/' + path.lstrip('/') - try: - # Note: Possible race condition here - # Currently create then find is not super feasable as create would require a - # call to save which we choose not to call to avoid filling the database - # with notfound/googlebot files/url. 
Raising 404 errors may roll back the transaction however - return cls.find_one(Q('node', 'eq', node) & Q('path', 'eq', path)) - except NoResultsFound: - return cls.create(node=node, path=path) - - @classmethod - def get_file_guids(cls, materialized_path, provider, node): - guids = [] - materialized_path = '/' + materialized_path.lstrip('/') - if materialized_path.endswith('/'): - folder_children = cls.find(Q('provider', 'eq', provider) & - Q('node', 'eq', node) & - Q('materialized_path', 'startswith', materialized_path)) - for item in folder_children: - if item.kind == 'file': - guid = item.get_guid() - if guid: - guids.append(guid._id) - else: - try: - file_obj = cls.find_one(Q('node', 'eq', node) & Q('materialized_path', 'eq', materialized_path)) - except NoResultsFound: - return guids - guid = file_obj.get_guid() - if guid: - guids.append(guid._id) - - return guids - - @classmethod - def resolve_class(cls, provider, _type=2): - """Resolve a provider and type to the appropriate subclass. - Usage: - >>> FileNode.resolve_class('box', FileNode.ANY) # BoxFileNode - >>> FileNode.resolve_class('dropbox', FileNode.FILE) # DropboxFile - :rtype: Subclass of FileNode - """ - try: - return PROVIDER_MAP[provider][int(_type)] - except IndexError: - raise exceptions.SubclassNotFound('_type must be 0, 1, or 2') - except KeyError: - raise exceptions.SubclassNotFound(provider) - - @classmethod - def _filter(cls, qs=None): - """Creates an odm query to limit the scope of whatever search method - to the given class. - :param qs RawQuery: An odm query or None - :rtype: RawQuery or None - """ - # Build a list of all possible contraints leaving None when appropriate - # filter(None, ...) 
removes all falsey values - qs = filter(None, (qs, - Q('is_file', 'eq', cls.is_file) if hasattr(cls, 'is_file') else None, - Q('provider', 'eq', cls.provider) if hasattr(cls, 'provider') else None, - )) - # If out list is empty return None; there's no filters to be applied - if not qs: - return None - # Use reduce to & together all our queries. equavilent to: - # return q1 & q2 ... & qn - return functools.reduce(lambda q1, q2: q1 & q2, qs) - - @classmethod - def find(cls, qs=None): - """A proxy for StoredFileNode.find but applies class based contraints. - Wraps The MongoQuerySet in a GenWrapper this overrides the __iter__ of - MongoQuerySet to return wrapped objects - :rtype: GenWrapper> - """ - return utils.GenWrapper(StoredFileNode.find(cls._filter(qs))) - - @classmethod - def find_one(cls, qs): - """A proxy for StoredFileNode.find_one but applies class based contraints. - :rtype: cls - """ - return StoredFileNode.find_one(cls._filter(qs)) - - @classmethod - def files_checked_out(cls, user): - """ - :param user: The user with checkedout files - :return: A queryset of all FileNodes checked out by user - """ - return cls.find(Q('checkout', 'eq', user)) - - @classmethod - def load(cls, _id): - """A proxy for StoredFileNode.load requires the wrapped version of the found value - to be an instance of cls. - :rtype: cls - """ - inst = StoredFileNode.load(_id) - if not inst: - return None - assert isinstance(inst, cls), 'Loaded object {} is not of type {}'.format(inst, cls) - return inst - - @property - def parent(self): - """A proxy to self.stored_object.parent but forces it to be wrapped. 
- """ - if self.stored_object.parent: - return self.stored_object.parent - return None - - @parent.setter - def parent(self, val): - """A proxy to self.stored_object.parent but will unwrap it when need be - """ - if isinstance(val, FileNode): - val = val.stored_object - self.stored_object.parent = val - - @property - def copied_from(self): - if self.stored_object.copied_from: - return self.stored_object.copied_from - return None - - @copied_from.setter - def copied_from(self, val): - if isinstance(val, FileNode): - val = val.stored_object - self.stored_object.copied_from = val - - @property - def deep_url(self): - """The url that this filenodes guid should resolve to. - Implemented here so that subclasses may override it or path. - See OsfStorage or PathFollowingNode. - """ - return self.node.web_url_for('addon_view_or_download_file', provider=self.provider, path=self.path.strip('/')) - - @property - def kind(self): - """Whether this FileNode is a file or folder as a string. - Used for serialization and backwards compatability - :rtype: str - :returns: 'file' or 'folder' - """ - return 'file' if self.is_file else 'folder' - - def __init__(self, *args, **kwargs): - """Contructor for FileNode's subclasses - If called with only a StoredFileNode it will be attached to self - Otherwise: - Injects provider and is_file when appropriate. - Creates a new StoredFileNode with kwargs, not saved. - Then attaches stored_object to self - """ - if args and isinstance(args[0], StoredFileNode): - assert len(args) == 1 - assert len(kwargs) == 0 - self.stored_object = args[0] - else: - if hasattr(self, 'provider'): - kwargs['provider'] = self.provider - if hasattr(self, 'is_file'): - kwargs['is_file'] = self.is_file - self.stored_object = StoredFileNode(*args, **kwargs) - - def save(self): - """A proxy to self.stored_object.save. 
- Implemented top level so that child class may override it - and just call super.save rather than self.stored_object.save - """ - return self.stored_object.save() - - def serialize(self, **kwargs): - return { - 'id': self._id, - 'path': self.path, - 'name': self.name, - 'kind': self.kind, - } - - def generate_waterbutler_url(self, **kwargs): - return util.waterbutler_api_url_for( - self.node._id, - self.provider, - self.path, - **kwargs - ) - - def delete(self, user=None, parent=None): - """Move self into the TrashedFileNode collection - and remove it from StoredFileNode - :param user User or None: The user that deleted this FileNode - """ - trashed = self._create_trashed(user=user, parent=parent) - self._repoint_guids(trashed) - self.node.save() - StoredFileNode.remove_one(self.stored_object) - return trashed - - def copy_under(self, destination_parent, name=None): - return utils.copy_files(self, destination_parent.node, destination_parent, name=name) - - def move_under(self, destination_parent, name=None): - self.name = name or self.name - self.parent = destination_parent.stored_object - self._update_node(save=True) # Trust _update_node to save us - - return self - - def update(self, revision, data, save=True, user=None): - """Note: User is a kwargs here because of special requirements of - dataverse and django - See dataversefile.update - """ - self.name = data['name'] - self.materialized_path = data['materialized'] - self.last_touched = timezone.now() - if save: - self.save() - - def _create_trashed(self, save=True, user=None, parent=None): - trashed = TrashedFileNode( - _id=self._id, - name=self.name, - path=self.path, - node=self.node, - parent=parent or self.parent, - history=self.history, - is_file=self.is_file, - checkout=self.checkout, - provider=self.provider, - versions=self.versions, - last_touched=self.last_touched, - materialized_path=self.materialized_path, - - deleted_by=user - ) - if save: - trashed.save() - return trashed - - def 
_repoint_guids(self, updated): - for guid in Guid.find(Q('referent', 'eq', self)): - guid.referent = updated - guid.save() - - def _update_node(self, recursive=True, save=True): - if self.parent is not None: - self.node = self.parent.node - if save: - self.save() - if recursive and not self.is_file: - for child in self.children: - child._update_node(save=save) - - def __getattr__(self, name): - """For the purpose of proxying all calls to the below stored_object - Saves typing out ~10 properties or so - """ - if 'stored_object' in self.__dict__: - try: - return getattr(self.stored_object, name) - except AttributeError: - pass # Avoids error message about the underlying object - return object.__getattribute__(self, name) - - def __setattr__(self, name, val): - # Property setters are called after __setattr__ is called - # If the requested attribute is a property with a setter go ahead and use it - maybe_prop = getattr(self.__class__, name, None) - if isinstance(maybe_prop, property) and maybe_prop.fset is not None: - return object.__setattr__(self, name, val) - if 'stored_object' in self.__dict__: - return setattr(self.stored_object, name, val) - return object.__setattr__(self, name, val) - - def __eq__(self, other): - return self.stored_object == getattr(other, 'stored_object', None) - - def __repr__(self): - return '<{}(name={!r}, node={!r})>'.format( - self.__class__.__name__, - self.stored_object.name, - self.stored_object.node - ) - - -class File(FileNode): - is_file = True - version_identifier = 'revision' # For backwards compatability - - def get_version(self, revision, required=False): - """Find a version with identifier revision - :returns: FileVersion or None - :raises: VersionNotFoundError if required is True - """ - for version in reversed(self.versions): - if version.identifier == revision: - break - else: - if required: - raise exceptions.VersionNotFoundError(revision) - return None - return version - - def update_version_metadata(self, location, 
metadata): - for version in reversed(self.versions): - if version.location == location: - version.update_metadata(metadata) - return - raise exceptions.VersionNotFoundError(location) - - def touch(self, auth_header, revision=None, **kwargs): - """The bread and butter of File, collects metadata about self - and creates versions and updates self when required. - If revisions is None the created version is NOT and should NOT be saved - as there is no identifing information to tell if it needs to be updated or not. - Hits Waterbutler's metadata endpoint and saves the returned data. - If a file cannot be rendered IE figshare private files a tuple of the FileVersion and - renderable HTML will be returned. - >>>isinstance(file_node.touch(), tuple) # This file cannot be rendered - :param str or None auth_header: If truthy it will set as the Authorization header - :returns: None if the file is not found otherwise FileVersion or (version, Error HTML) - """ - # Resvolve primary key on first touch - self.save() - # For backwards compatability - revision = revision or kwargs.get(self.version_identifier) - - version = self.get_version(revision) - # Versions do not change. 
No need to refetch what we already know - if version is not None: - return version - - headers = {} - if auth_header: - headers['Authorization'] = auth_header - - resp = requests.get( - self.generate_waterbutler_url(revision=revision, meta=True, **kwargs), - headers=headers, - ) - if resp.status_code != 200: - logger.warning('Unable to find {} got status code {}'.format(self, resp.status_code)) - return None - return self.update(revision, resp.json()['data']['attributes']) - # TODO Switch back to head requests - # return self.update(revision, json.loads(resp.headers['x-waterbutler-metadata'])) - - def update(self, revision, data, user=None): - """Using revision and data update all data pretaining to self - :param str or None revision: The revision that data points to - :param dict data: Metadata recieved from waterbutler - :returns: FileVersion - """ - self.name = data['name'] - self.materialized_path = data['materialized'] - - version = FileVersion(identifier=revision) - version.update_metadata(data, save=False) - - # Transform here so it can be sortted on later - if data['modified'] is not None and data['modified'] != '': - data['modified'] = parse_date( - data['modified'], - ignoretz=True, - default=timezone.now() # Just incase nothing can be parsed - ) - - # if revision is none then version is the latest version - # Dont save the latest information - if revision is not None: - version.save() - self.versions.add(version) - for entry in self.history: - if ('etag' in entry and 'etag' in data) and (entry['etag'] == data['etag']): - break - else: - # Insert into history if there is no matching etag - utils.insort(self.history, data, lambda x: x['modified']) - - # Finally update last touched - self.last_touched = timezone.now() - - self.save() - return version - - def get_download_count(self, version=None): - """Pull the download count from the pagecounter collection - Limit to version if specified. 
- Currently only useful for OsfStorage - """ - parts = ['download', self.node._id, self._id] - if version is not None: - parts.append(version) - page = ':'.join([format(part) for part in parts]) - _, count = get_basic_counters(page) - - return count or 0 - - def serialize(self): - if not self.versions: - return dict( - super(File, self).serialize(), - size=None, - version=None, - modified=None, - created=None, - contentType=None, - downloads=self.get_download_count(), - checkout=self.checkout._id if self.checkout else None, - ) - - version = self.versions[-1] - return dict( - super(File, self).serialize(), - size=version.size, - downloads=self.get_download_count(), - checkout=self.checkout._id if self.checkout else None, - version=version.identifier if self.versions else None, - contentType=version.content_type if self.versions else None, - modified=version.date_modified.isoformat() if version.date_modified else None, - created=self.versions[0].date_modified.isoformat() if self.versions[0].date_modified else None, - ) - - -class Folder(FileNode): - is_file = False - - @property - def children(self): - """Finds all Filenodes that view self as a parent - :returns: A GenWrapper for all children - :rtype: GenWrapper> - """ - return FileNode.find(Q('parent', 'eq', self._id)) - - def delete(self, recurse=True, user=None, parent=None): - trashed = self._create_trashed(user=user, parent=parent) - if recurse: - for child in self.children: - child.delete(user=user, parent=trashed) - self._repoint_guids(trashed) - StoredFileNode.remove_one(self.stored_object) - return trashed - - def append_file(self, name, path=None, materialized_path=None, save=True): - return self._create_child(name, FileNode.FILE, path=path, materialized_path=materialized_path, save=save) - - def append_folder(self, name, path=None, materialized_path=None, save=True): - return self._create_child(name, FileNode.FOLDER, path=path, materialized_path=materialized_path, save=save) - - def _create_child(self, 
name, kind, path=None, materialized_path=None, save=True): - child = FileNode.resolve_class(self.provider, kind)( - name=name, - node=self.node, - path=path or '/' + name, - parent=self.stored_object, - materialized_path=materialized_path or - os.path.join(self.materialized_path, name) + '/' if not kind else '' - ) - if save: - child.save() - return child - - def find_child_by_name(self, name, kind=2): - return FileNode.resolve_class(self.provider, kind).find_one( - Q('name', 'eq', name) & - Q('parent', 'eq', self) - ) - - -class FileVersion(StoredObject): - """A version of an OsfStorageFileNode. contains information - about where the file is located, hashes and datetimes - """ - - __indices__ = [{ - 'unique': False, - 'key_or_list': [ - ('_id', pymongo.ASCENDING), - ('metadata.vault', pymongo.ASCENDING), - ('metadata.archive', pymongo.ASCENDING), - ('metadata.sha256', pymongo.ASCENDING), - ] - }] - - _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId())) - - creator = fields.ForeignField('user') - - identifier = fields.StringField(required=True) - - # Date version record was created. This is the date displayed to the user. - date_created = fields.DateTimeField(auto_now_add=True) - - # Dictionary specifying all information needed to locate file on backend - # { - # 'service': 'cloudfiles', # required - # 'container': 'osf', # required - # 'object': '20c53b', # required - # 'worker_url': '127.0.0.1', - # 'worker_host': 'upload-service-1', - # } - location = fields.DictionaryField(default=None, validate=utils.validate_location) - - # Dictionary containing raw metadata from upload service response - # { - # 'size': 1024, # required - # 'content_type': 'text/plain', # required - # 'date_modified': '2014-11-07T20:24:15', # required - # 'md5': 'd077f2', - # } - metadata = fields.DictionaryField() - - size = fields.IntegerField() - content_type = fields.StringField() - # Date file modified on third-party backend. 
Not displayed to user, since - # this date may be earlier than the date of upload if the file already - # exists on the backend - date_modified = fields.DateTimeField() - - @property - def location_hash(self): - return self.location['object'] - - @property - def archive(self): - return self.metadata.get('archive') - - def is_duplicate(self, other): - return self.location_hash == other.location_hash - - def update_metadata(self, metadata, save=True): - self.metadata.update(metadata) - # metadata has no defined structure so only attempt to set attributes - # If its are not in this callback it'll be in the next - self.size = self.metadata.get('size', self.size) - self.content_type = self.metadata.get('contentType', self.content_type) - if self.metadata.get('modified'): - # TODO handle the timezone here the user that updates the file may see an - # Incorrect version - self.date_modified = parse_date(self.metadata['modified'], ignoretz=True) - - if save: - self.save() - - def _find_matching_archive(self, save=True): - """Find another version with the same sha256 as this file. - If found copy its vault name and glacier id, no need to create additional backups. 
- returns True if found otherwise false - """ - if 'sha256' not in self.metadata: - return False # Dont bother searching for nothing - - if 'vault' in self.metadata and 'archive' in self.metadata: - # Shouldn't ever happen, but we already have an archive - return True # We've found ourself - - qs = self.__class__.find( - Q('_id', 'ne', self._id) & - Q('metadata.vault', 'ne', None) & - Q('metadata.archive', 'ne', None) & - Q('metadata.sha256', 'eq', self.metadata['sha256']) - ).limit(1) - if qs.count() < 1: - return False - other = qs[0] - try: - self.metadata['vault'] = other.metadata['vault'] - self.metadata['archive'] = other.metadata['archive'] - except KeyError: - return False - if save: - self.save() - return True diff --git a/website/files/models/box.py b/website/files/models/box.py deleted file mode 100644 index 2766524f96b8..000000000000 --- a/website/files/models/box.py +++ /dev/null @@ -1,16 +0,0 @@ -from website.files.models.base import File, Folder, FileNode - - -__all__ = ('BoxFile', 'BoxFolder', 'BoxFileNode') - - -class BoxFileNode(FileNode): - provider = 'box' - - -class BoxFolder(BoxFileNode, Folder): - pass - - -class BoxFile(BoxFileNode, File): - pass diff --git a/website/files/models/dataverse.py b/website/files/models/dataverse.py deleted file mode 100644 index 696b13253dc3..000000000000 --- a/website/files/models/dataverse.py +++ /dev/null @@ -1,44 +0,0 @@ -from framework.auth.core import _get_current_user - -from website.files.models.base import File, Folder, FileNode, FileVersion - - -__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode') - - -class DataverseFileNode(FileNode): - provider = 'dataverse' - - -class DataverseFolder(DataverseFileNode, Folder): - pass - - -class DataverseFile(DataverseFileNode, File): - version_identifier = 'version' - - def update(self, revision, data, user=None): - """Note: Dataverse only has psuedo versions, don't save them - Dataverse requires a user for the weird check below - and Django dies 
when _get_current_user is called - """ - self.name = data['name'] - self.materialized_path = data['materialized'] - self.save() - - version = FileVersion(identifier=revision) - version.update_metadata(data, save=False) - - user = user or _get_current_user() - if not user or not self.node.can_edit(user=user): - try: - # Users without edit permission can only see published files - if not data['extra']['hasPublishedVersion']: - # Blank out name and path for the render - # Dont save because there's no reason to persist the change - self.name = '' - self.materialized_path = '' - return (version, '') - except (KeyError, IndexError): - pass - return version diff --git a/website/files/models/dropbox.py b/website/files/models/dropbox.py deleted file mode 100644 index 9f90b63584fe..000000000000 --- a/website/files/models/dropbox.py +++ /dev/null @@ -1,17 +0,0 @@ -from website.files.models.base import File, Folder -from website.files.models.ext import PathFollowingFileNode - - -__all__ = ('DropboxFile', 'DropboxFolder', 'DropboxFileNode') - - -class DropboxFileNode(PathFollowingFileNode): - provider = 'dropbox' - - -class DropboxFolder(DropboxFileNode, Folder): - pass - - -class DropboxFile(DropboxFileNode, File): - pass diff --git a/website/files/models/ext.py b/website/files/models/ext.py deleted file mode 100644 index 3addefb39b93..000000000000 --- a/website/files/models/ext.py +++ /dev/null @@ -1,39 +0,0 @@ -"""website.files.models.ext is home to subclasses of FileNode that provide -additional functionality and have no place in website.files.models.base -""" -import os - -from website.files.models.base import FileNode - - -class PathFollowingFileNode(FileNode): - """A helper class that will attempt to track the its file - through changes in the parent addons settings - ie: Moving you dropbox director up or down X levels - stored_object's path will always be the full path - from the providers root directory - """ - - FOLDER_ATTR_NAME = 'folder' - - @classmethod - def 
get_or_create(cls, node, path): - """Forces path to extend to the add-on's root directory - """ - node_settings = node.get_addon(cls.provider) - path = os.path.join(getattr(node_settings, cls.FOLDER_ATTR_NAME).strip('/'), path.lstrip('/')) - return super(PathFollowingFileNode, cls).get_or_create(node, '/' + path) - - @property - def path(self): - """Mutates the underlying stored_object's path to be relative to _get_connected_path - """ - return '/' + self.stored_object.path.replace(self._get_connected_path(), '', 1).lstrip('/') - - def _get_connected_path(self): - """Returns the path of the connected provider add-on - >>> pffn._get_connected_path() # /MyDropbox/FolderImSharingOnTheOsf - """ - node_settings = self.node.get_addon(self.provider) - assert node_settings is not None, 'Connected node has no {} account'.format(self.provider) - return getattr(node_settings, self.FOLDER_ATTR_NAME).strip('/') diff --git a/website/files/models/figshare.py b/website/files/models/figshare.py deleted file mode 100644 index bc462f3e7c60..000000000000 --- a/website/files/models/figshare.py +++ /dev/null @@ -1,48 +0,0 @@ -import markupsafe - -from website.files.models.base import File, Folder, FileNode, FileVersion - - -__all__ = ('FigshareFile', 'FigshareFolder', 'FigshareFileNode') - - -class FigshareFileNode(FileNode): - provider = 'figshare' - - -class FigshareFolder(FigshareFileNode, Folder): - pass - - -class FigshareFile(FigshareFileNode, File): - - def touch(self, bearer, revision=None, **kwargs): - return super(FigshareFile, self).touch(bearer, revision=None, **kwargs) - - def update(self, revision, data, user=None): - """Figshare does not support versioning. - Always pass revision as None to avoid conflict. 
- """ - self.name = data['name'] - self.materialized_path = data['materialized'] - self.save() - - version = FileVersion(identifier=None) - version.update_metadata(data, save=False) - - # Draft files are not renderable - if data['extra']['status'] == 'drafts': - return (version, u''' - - - '''.format(name=markupsafe.escape(self.name))) - - return version diff --git a/website/files/models/github.py b/website/files/models/github.py deleted file mode 100644 index fa8a296dd255..000000000000 --- a/website/files/models/github.py +++ /dev/null @@ -1,20 +0,0 @@ -from website.files.models.base import File, Folder, FileNode - - -__all__ = ('GithubFile', 'GithubFolder', 'GithubFileNode') - - -class GithubFileNode(FileNode): - provider = 'github' - - -class GithubFolder(GithubFileNode, Folder): - pass - - -class GithubFile(GithubFileNode, File): - version_identifier = 'ref' - - def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs): - revision = revision or ref or branch - return super(GithubFile, self).touch(auth_header, revision=revision, **kwargs) diff --git a/website/files/models/googledrive.py b/website/files/models/googledrive.py deleted file mode 100644 index db4e83cf9094..000000000000 --- a/website/files/models/googledrive.py +++ /dev/null @@ -1,22 +0,0 @@ -from website.files.models.base import File, Folder, FileNode -# from website.files.models.ext import PathFollowingFileNode - - -__all__ = ('GoogleDriveFile', 'GoogleDriveFolder', 'GoogleDriveFileNode') - - -# TODO make googledrive "pathfollowing" -# A migration will need to be run that concats -# folder_path and filenode.path -# class GoogleDriveFileNode(PathFollowingFileNode): -class GoogleDriveFileNode(FileNode): - provider = 'googledrive' - FOLDER_ATTR_NAME = 'folder_path' - - -class GoogleDriveFolder(GoogleDriveFileNode, Folder): - pass - - -class GoogleDriveFile(GoogleDriveFileNode, File): - pass diff --git a/website/files/models/osfstorage.py b/website/files/models/osfstorage.py deleted 
file mode 100644 index 2086f9f6d08b..000000000000 --- a/website/files/models/osfstorage.py +++ /dev/null @@ -1,328 +0,0 @@ -from __future__ import unicode_literals - -import os - -from modularodm import Q - -from framework.auth import Auth -from framework.guid.model import Guid -from website.exceptions import InvalidTagError, NodeStateError, TagNotFoundError -from website.files import exceptions -from website.files.models.base import File, Folder, FileNode, FileVersion, TrashedFileNode -from website.util import permissions - - -__all__ = ('OsfStorageFile', 'OsfStorageFolder', 'OsfStorageFileNode') - - -class OsfStorageFileNode(FileNode): - provider = 'osfstorage' - - @classmethod - def get(cls, _id, node): - return cls.find_one(Q('_id', 'eq', _id) & Q('node', 'eq', node)) - - @classmethod - def get_or_create(cls, node, path): - """Override get or create for osfstorage - Path is always the _id of the osfstorage filenode. - Use load here as its way faster than find. - Just manually assert that node is equal to node. - """ - inst = cls.load(path.strip('/')) - # Use _id as odms default comparison mucks up sometimes - if inst and inst.node._id == node._id: - return inst - - # Dont raise anything a 404 will be raised later - return cls.create(node=node, path=path) - - @classmethod - def get_file_guids(cls, materialized_path, provider, node=None): - guids = [] - path = materialized_path.strip('/') - file_obj = cls.load(path) - if not file_obj: - file_obj = TrashedFileNode.load(path) - - # At this point, file_obj may be an OsfStorageFile, an OsfStorageFolder, or a - # TrashedFileNode. TrashedFileNodes do not have *File and *Folder subclasses, since - # only osfstorage trashes folders. To search for children of TrashFileNodes - # representing ex-OsfStorageFolders, we will reimplement the `children` method of the - # Folder class here. 
- if not file_obj.is_file: - children = [] - if isinstance(file_obj, TrashedFileNode): - children = TrashedFileNode.find(Q('parent', 'eq', file_obj._id)) - else: - children = file_obj.children - - for item in children: - guids.extend(cls.get_file_guids(item.path, provider, node=node)) - else: - try: - guid = Guid.find(Q('referent', 'eq', file_obj))[0] - except IndexError: - guid = None - if guid: - guids.append(guid._id) - - return guids - - @property - def kind(self): - return 'file' if self.is_file else 'folder' - - @property - def materialized_path(self): - """creates the full path to a the given filenode - Note: Possibly high complexity/ many database calls - USE SPARINGLY - """ - if not self.parent: - return '/' - # Note: ODM cache can be abused here - # for highly nested folders calling - # list(self.__class__.find(Q(nodesetting),Q(folder)) - # may result in a massive increase in performance - def lineage(): - current = self - while current: - yield current - current = current.parent - - path = os.path.join(*reversed([x.name for x in lineage()])) - if self.is_file: - return '/{}'.format(path) - return '/{}/'.format(path) - - @property - def path(self): - """Path is dynamically computed as storedobject.path is stored - as an empty string to make the unique index work properly for osfstorage - """ - return '/' + self._id + ('' if self.is_file else '/') - - @property - def is_checked_out(self): - return self.checkout is not None - - @property - def is_preprint_primary(self): - return self.node.preprint_file == self and not self.node._has_abandoned_preprint - - @property - def _delete_allowed(self): - if self.is_preprint_primary: - raise exceptions.FileNodeIsPrimaryFile() - if self.is_checked_out: - raise exceptions.FileNodeCheckedOutError() - return True - - @property - def delete_allowed(self): - try: - return self._delete_allowed - except: - return False - - def delete(self, user=None, parent=None): - return super(OsfStorageFileNode, self).delete(user=user, 
parent=parent) if self._delete_allowed else None - - def move_under(self, destination_parent, name=None): - if self.is_preprint_primary: - if self.node._id != destination_parent.node._id or self.provider != destination_parent.provider: - raise exceptions.FileNodeIsPrimaryFile() - if self.is_checked_out: - raise exceptions.FileNodeCheckedOutError() - return super(OsfStorageFileNode, self).move_under(destination_parent, name) - - def check_in_or_out(self, user, checkout, save=False): - """ - Updates self.checkout with the requesting user or None, - iff user has permission to check out file or folder. - Adds log to self.node. - - - :param user: User making the request - :param checkout: Either the same user or None, depending on in/out-checking - :param save: Whether or not to save the user - """ - from website.project.model import NodeLog # Avoid circular import - - if (self.is_checked_out and self.checkout != user and permissions.ADMIN not in self.node.permissions.get(user._id, []))\ - or permissions.WRITE not in self.node.get_permissions(user): - raise exceptions.FileNodeCheckedOutError() - - action = NodeLog.CHECKED_OUT if checkout else NodeLog.CHECKED_IN - - if self.is_checked_out and action == NodeLog.CHECKED_IN or not self.is_checked_out and action == NodeLog.CHECKED_OUT: - self.checkout = checkout - - self.node.add_log( - action=action, - params={ - 'kind': self.kind, - 'project': self.node.parent_id, - 'node': self.node._id, - 'urls': { - # web_url_for unavailable -- called from within the API, so no flask app - 'download': '/project/{}/files/{}/{}/?action=download'.format(self.node._id, self.provider, self._id), - 'view': '/project/{}/files/{}/{}'.format(self.node._id, self.provider, self._id)}, - 'path': self.materialized_path - }, - auth=Auth(user), - ) - - if save: - self.save() - - def save(self): - self.path = '' - self.materialized_path = '' - return super(OsfStorageFileNode, self).save() - - -class OsfStorageFile(OsfStorageFileNode, File): - - def 
touch(self, bearer, version=None, revision=None, **kwargs): - try: - return self.get_version(revision or version) - except ValueError: - return None - - @property - def history(self): - return [v.metadata for v in self.versions] - - def serialize(self, include_full=None, version=None): - ret = super(OsfStorageFile, self).serialize() - if include_full: - ret['fullPath'] = self.materialized_path - - version = self.get_version(version) - return dict( - ret, - version=len(self.versions), - md5=version.metadata.get('md5') if version else None, - sha256=version.metadata.get('sha256') if version else None, - ) - - def create_version(self, creator, location, metadata=None): - latest_version = self.get_version() - version = FileVersion(identifier=len(self.versions) + 1, creator=creator, location=location) - - if latest_version and latest_version.is_duplicate(version): - return latest_version - - if metadata: - version.update_metadata(metadata) - - version._find_matching_archive(save=False) - - version.save() - self.versions.append(version) - self.save() - - return version - - def get_version(self, version=None, required=False): - if version is None: - if self.versions: - return self.versions[-1] - return None - - try: - return self.versions[int(version) - 1] - except (IndexError, ValueError): - if required: - raise exceptions.VersionNotFoundError(version) - return None - - def add_tag_log(self, action, tag, auth): - node = self.node - node.add_log( - action=action, - params={ - 'parent_node': node.parent_id, - 'node': node._id, - 'urls': { - 'download': '/project/{}/files/osfstorage/{}/?action=download'.format(node._id, self._id), - 'view': '/project/{}/files/osfstorage/{}/'.format(node._id, self._id)}, - 'path': self.materialized_path, - 'tag': tag, - }, - auth=auth, - ) - - def add_tag(self, tag, auth, save=True, log=True): - from website.models import Tag, NodeLog # Prevent import error - if tag not in self.tags and not self.node.is_registration: - new_tag = 
Tag.load(tag) - if not new_tag: - new_tag = Tag(_id=tag) - new_tag.save() - self.tags.append(new_tag) - if log: - self.add_tag_log(NodeLog.FILE_TAG_ADDED, tag, auth) - if save: - self.save() - return True - return False - - def remove_tag(self, tag, auth, save=True, log=True): - from website.models import Tag, NodeLog # Prevent import error - if self.node.is_registration: - # Can't perform edits on a registration - raise NodeStateError - - tag = Tag.load(tag) - if not tag: - raise InvalidTagError - elif tag not in self.tags: - raise TagNotFoundError - else: - self.tags.remove(tag) - if log: - self.add_tag_log(NodeLog.FILE_TAG_REMOVED, tag._id, auth) - if save: - self.save() - return True - - def delete(self, user=None, parent=None): - from website.search import search - search.update_file(self, delete=True) - return super(OsfStorageFile, self).delete(user, parent) - - def save(self, skip_search=False): - from website.search import search - ret = super(OsfStorageFile, self).save() - if not skip_search: - search.update_file(self) - return ret - -class OsfStorageFolder(OsfStorageFileNode, Folder): - - @property - def is_checked_out(self): - if self.checkout: - return True - for child in self.children: - if child.is_checked_out: - return True - return False - - @property - def is_preprint_primary(self): - if self.node.is_preprint: - for child in self.children: - if child.is_preprint_primary: - return True - return False - - def serialize(self, include_full=False, version=None): - # Versions just for compatability - ret = super(OsfStorageFolder, self).serialize() - if include_full: - ret['fullPath'] = self.materialized_path - return ret diff --git a/website/files/models/owncloud.py b/website/files/models/owncloud.py deleted file mode 100644 index 23e1e63c8a69..000000000000 --- a/website/files/models/owncloud.py +++ /dev/null @@ -1,16 +0,0 @@ -from website.files.models.base import File, Folder, FileNode - - -__all__ = ('OwncloudFile', 'OwncloudFolder', 
'OwncloudFileNode') - - -class OwncloudFileNode(FileNode): - provider = 'owncloud' - - -class OwncloudFolder(OwncloudFileNode, Folder): - pass - - -class OwncloudFile(OwncloudFileNode, File): - pass diff --git a/website/files/models/s3.py b/website/files/models/s3.py deleted file mode 100644 index 9cc6cd7ffc63..000000000000 --- a/website/files/models/s3.py +++ /dev/null @@ -1,15 +0,0 @@ -from website.files.models.base import File, Folder, FileNode - -__all__ = ('S3File', 'S3Folder', 'S3FileNode') - - -class S3FileNode(FileNode): - provider = 's3' - - -class S3Folder(S3FileNode, Folder): - pass - - -class S3File(S3FileNode, File): - version_identifier = 'version' diff --git a/website/identifiers/model.py b/website/identifiers/model.py deleted file mode 100644 index 0399515a2357..000000000000 --- a/website/identifiers/model.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- - -from bson import ObjectId - -from modularodm import Q -from modularodm import fields -from modularodm.storage.base import KeyExistsException - -from framework.mongo import StoredObject -from framework.mongo.utils import unique_on - - -@unique_on(['referent.0', 'referent.1', 'category']) -class Identifier(StoredObject): - """A persistent identifier model for DOIs, ARKs, and the like.""" - _id = fields.StringField(default=lambda: str(ObjectId())) - # object to which the identifier points - referent = fields.AbstractForeignField(required=True) - # category: e.g. 'ark', 'doi' - category = fields.StringField(required=True) - # value: e.g. 'FK424601' - value = fields.StringField(required=True) - - -class IdentifierMixin(object): - """Model mixin that adds methods for getting and setting Identifier objects - for model objects. 
- """ - - def get_identifier(self, category): - identifiers = Identifier.find( - Q('referent', 'eq', self) & - Q('category', 'eq', category) - ) - return identifiers[0] if identifiers else None - - def get_identifier_value(self, category): - identifier = self.get_identifier(category) - return identifier.value if identifier else None - - def set_identifier_value(self, category, value): - try: - identifier = Identifier(referent=self, category=category, value=value) - identifier.save() - except KeyExistsException: - identifier = self.get_identifier(category) - assert identifier is not None - identifier.value = value - identifier.save() diff --git a/website/institutions/model.py b/website/institutions/model.py deleted file mode 100644 index 98619330d216..000000000000 --- a/website/institutions/model.py +++ /dev/null @@ -1,193 +0,0 @@ -from django.core.urlresolvers import reverse -from django.db.models.query import QuerySet as DjangoQuerySet - -from modularodm import Q -from modularodm.exceptions import NoResultsFound -from modularodm.query.query import RawQuery -from modularodm.storage.mongostorage import MongoQuerySet - - -class AffiliatedInstitutionsList(list): - ''' - A list to implement append and remove methods to a private node list through a - public Institution-returning property. Initialization should occur with the instance of the public list, - the object the list belongs to, and the private attribute ( a list) the public property - is attached to, and as the return value of the property. 
- Ex: - class Node(): - _affiliated_institutions = [] - - @property - affiliated_institutions(self): - return AffiliatedInstitutionsList( - [Institution(node) for node in self._affiliated_institutions], - obj=self, private_target='_affiliated_institutions') - ) - ''' - def __init__(self, init, obj, private_target): - super(AffiliatedInstitutionsList, self).__init__(init or []) - self.obj = obj - self.target = private_target - - def append(self, to_append): - temp_list = getattr(self.obj, self.target) - temp_list.append(to_append.node) - setattr(self.obj, self.target, temp_list) - - def remove(self, to_remove): - temp_list = getattr(self.obj, self.target) - temp_list.remove(to_remove.node) - setattr(self.obj, self.target, temp_list) - - -class InstitutionQuerySet(MongoQuerySet): - - def __init__(self, queryset): - if isinstance(queryset, DjangoQuerySet): - model = queryset.model - data = queryset.all() - else: - model = queryset.schema - data = queryset.data - super(InstitutionQuerySet, self).__init__(model, data) - - def sort(self, *field_names): - actual_field_names = [Institution.attribute_map.get(each, each) for each in field_names] - return super(InstitutionQuerySet, self).sort(*actual_field_names) - - def __iter__(self): - for each in super(InstitutionQuerySet, self).__iter__(): - yield Institution(each) - - def _do_getitem(self, index): - item = super(InstitutionQuerySet, self)._do_getitem(index) - if isinstance(item, MongoQuerySet): - return self.__class__(item) - return Institution(item) - -class Institution(object): - ''' - "wrapper" class for Node. Together with the find and institution attributes & methods in Node, - this is to be used to allow interaction with Institutions, which are Nodes (with ' institution_id ' != None), - as if they were a wholly separate collection. 
To find an institution, use the find methods here, - and to use a Node as Institution, instantiate an Institution with ' Institution(node) ' - ''' - attribute_map = { - '_id': 'institution_id', - 'auth_url': 'institution_auth_url', - 'logout_url': 'institution_logout_url', - 'domains': 'institution_domains', - 'name': 'title', - 'logo_name': 'institution_logo_name', - 'description': 'description', - 'email_domains': 'institution_email_domains', - 'banner_name': 'institution_banner_name', - 'is_deleted': 'is_deleted', - } - - def __init__(self, node=None): - self.node = node - if node is None: - return - for key, value in self.attribute_map.iteritems(): - setattr(self, key, getattr(node, value)) - - def __getattr__(self, item): - return getattr(self.node, item) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - return self._id == other._id - - def save(self): - from website.search.search import update_institution - update_institution(self) - for key, value in self.attribute_map.iteritems(): - if getattr(self, key) != getattr(self.node, value): - setattr(self.node, value, getattr(self, key)) - self.node.save() - - @classmethod - def find(cls, query=None, deleted=False, **kwargs): - from website.models import Node # done to prevent import error - if query and getattr(query, 'nodes', False): - for node in query.nodes: - replacement_attr = cls.attribute_map.get(node.attribute, False) - node.attribute = replacement_attr or node.attribute - elif isinstance(query, RawQuery): - replacement_attr = cls.attribute_map.get(query.attribute, False) - query.attribute = replacement_attr or query.attribute - query = query & Q('institution_id', 'ne', None) if query else Q('institution_id', 'ne', None) - query = query & Q('is_deleted', 'ne', True) if not deleted else query - nodes = Node.find(query, allow_institution=True, **kwargs) - return InstitutionQuerySet(nodes) - - @classmethod - def find_one(cls, query=None, deleted=False, **kwargs): - 
from website.models import Node - if query and getattr(query, 'nodes', False): - for node in query.nodes: - if node._Q__key in cls.attribute_map: - node._Q__key = cls.attribute_map[node._Q__key] - elif isinstance(query, RawQuery) and query._Q__key in cls.attribute_map: - query._Q__key = cls.attribute_map[query._Q__key] - query = query & Q('institution_id', 'ne', None) if query else Q('institution_id', 'ne', None) - query = query & Q('is_deleted', 'ne', True) if not deleted else query - node = Node.find_one(query, allow_institution=True, **kwargs) - return cls(node) - - @classmethod - def load(cls, key): - from website.models import Node - try: - node = Node.find_one(Q('institution_id', 'eq', key), allow_institution=True) - return cls(node) - except NoResultsFound: - return None - - def __repr__(self): - return ''.format(self.name, self._id) - - @property - def pk(self): - return self._id - - @property - def api_v2_url(self): - return reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) - - @property - def absolute_api_v2_url(self): - from api.base.utils import absolute_reverse - return absolute_reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) - - @property - def nodes_url(self): - return self.absolute_api_v2_url + 'nodes/' - - @property - def nodes_relationship_url(self): - return self.absolute_api_v2_url + 'relationships/nodes/' - - @property - def logo_path(self): - if self.logo_name: - return '/static/img/institutions/shields/{}'.format(self.logo_name) - else: - return None - - @property - def logo_path_rounded_corners(self): - logo_base = '/static/img/institutions/shields-rounded-corners/{}-rounded-corners.png' - if self.logo_name: - return logo_base.format(self.logo_name.replace('.png', '')) - else: - return None - - @property - def banner_path(self): - if self.banner_name: - return '/static/img/institutions/banners/{}'.format(self.banner_name) - else: - return None 
diff --git a/website/institutions/views.py b/website/institutions/views.py index cdbdeca21d06..f54dfceef73b 100644 --- a/website/institutions/views.py +++ b/website/institutions/views.py @@ -1,11 +1,12 @@ import httplib as http -from .model import Institution from framework.exceptions import HTTPError from modularodm import Q from modularodm.exceptions import NoResultsFound +from osf.models import Institution + def serialize_institution(inst): return { 'id': inst._id, diff --git a/website/mails/__init__.py b/website/mails/__init__.py index 95558f1dca63..1ed0bb2c90ab 100644 --- a/website/mails/__init__.py +++ b/website/mails/__init__.py @@ -1,2 +1 @@ from .mails import * # noqa -from .queued_mails import * # noqa diff --git a/website/mails/listeners.py b/website/mails/listeners.py index bde926965ff3..4a0d6939080a 100644 --- a/website/mails/listeners.py +++ b/website/mails/listeners.py @@ -5,7 +5,7 @@ from django.utils import timezone from modularodm import Q -from website import mails, settings +from website import settings from framework.auth import signals as auth_signals from website.project import signals as project_signals from website.conferences import signals as conference_signals @@ -16,9 +16,10 @@ def queue_no_addon_email(user): """Queue an email for user who has not connected an addon after `settings.NO_ADDON_WAIT_TIME` months of signing up for the OSF. """ - mails.queue_mail( + from osf.models.queued_mail import queue_mail, NO_ADDON + queue_mail( to_addr=user.username, - mail=mails.NO_ADDON, + mail=NO_ADDON, send_at=timezone.now() + settings.NO_ADDON_WAIT_TIME, user=user, fullname=user.fullname @@ -29,13 +30,14 @@ def queue_first_public_project_email(user, node, meeting_creation): """Queue and email after user has made their first non-OSF4M project public. 
""" + from osf.models.queued_mail import queue_mail, QueuedMail, NEW_PUBLIC_PROJECT_TYPE, NEW_PUBLIC_PROJECT if not meeting_creation: - sent_mail = mails.QueuedMail.find(Q('user', 'eq', user) & Q('sent_at', 'ne', None) & - Q('email_type', 'eq', mails.NEW_PUBLIC_PROJECT_TYPE)) + sent_mail = QueuedMail.find(Q('user', 'eq', user) & Q('sent_at', 'ne', None) & + Q('email_type', 'eq', NEW_PUBLIC_PROJECT_TYPE)) if not sent_mail.count(): - mails.queue_mail( + queue_mail( to_addr=user.username, - mail=mails.NEW_PUBLIC_PROJECT, + mail=NEW_PUBLIC_PROJECT, send_at=timezone.now() + settings.NEW_PUBLIC_PROJECT_WAIT_TIME, user=user, nid=node._id, @@ -46,11 +48,12 @@ def queue_first_public_project_email(user, node, meeting_creation): @conference_signals.osf4m_user_created.connect def queue_osf4m_welcome_email(user, conference, node): """Queue an email once a new user is created for OSF for Meetings""" + from osf.models.queued_mail import queue_mail, WELCOME_OSF4M root = (node.get_addon('osfstorage')).get_root() root_children = [child for child in root.children if child.is_file] - mails.queue_mail( + queue_mail( to_addr=user.username, - mail=mails.WELCOME_OSF4M, + mail=WELCOME_OSF4M, send_at=timezone.now() + settings.WELCOME_OSF4M_WAIT_TIME, user=user, conference=conference.name, diff --git a/website/mails/presends.py b/website/mails/presends.py index f78301398875..2bcbf11b68ed 100644 --- a/website/mails/presends.py +++ b/website/mails/presends.py @@ -8,8 +8,7 @@ def no_addon(email): return len(email.user.get_addons()) == 0 def no_login(email): - from website.models import QueuedMail - from website.mails import NO_LOGIN_TYPE + from osf.models.queued_mail import QueuedMail, NO_LOGIN_TYPE sent = QueuedMail.find(Q('user', 'eq', email.user) & Q('email_type', 'eq', NO_LOGIN_TYPE) & Q('_id', 'ne', email._id)) if sent.count(): return False @@ -25,9 +24,9 @@ def new_public_project(email): """ # In line import to prevent circular importing - from website.models import Node + from osf.models 
import AbstractNode - node = Node.load(email.data['nid']) + node = AbstractNode.load(email.data['nid']) if not node: return False @@ -44,7 +43,7 @@ def welcome_osf4m(email): :return: boolean based on whether the email should be sent """ # In line import to prevent circular importing - from website.files.models import OsfStorageFileNode + from addons.osfstorage.models import OsfStorageFileNode if email.user.date_last_login: if email.user.date_last_login > timezone.now() - settings.WELCOME_OSF4M_WAIT_TIME_GRACE: return False diff --git a/website/mails/queued_mails.py b/website/mails/queued_mails.py deleted file mode 100644 index b26308eae481..000000000000 --- a/website/mails/queued_mails.py +++ /dev/null @@ -1,152 +0,0 @@ -import bson - -from django.utils import timezone -from modularodm import fields, Q -from framework.mongo import StoredObject -from .mails import Mail, send_mail -from website import settings -from website.mails import presends - - -class QueuedMail(StoredObject): - _id = fields.StringField(primary=True, default=lambda: str(bson.ObjectId())) - user = fields.ForeignField('User', index=True, required=True) - to_addr = fields.StringField() - send_at = fields.DateTimeField(index=True, required=True) - - # string denoting the template, presend to be used. 
Has to be an index of queue_mail types - email_type = fields.StringField(index=True, required=True) - - # dictionary with variables used to populate mako template and store information used in presends - # Example: - # self.data = { - # 'nid' : 'ShIpTo', - # 'fullname': 'Florence Welch', - #} - data = fields.DictionaryField() - sent_at = fields.DateTimeField(index=True) - - def __repr__(self): - if self.sent_at is not None: - return ''.format( - self._id, self.email_type, self.to_addr, self.sent_at - ) - return ''.format( - self._id, self.email_type, self.to_addr, self.send_at - ) - - def send_mail(self): - """ - Grabs the data from this email, checks for user subscription to help mails, - - constructs the mail object and checks presend. Then attempts to send the email - through send_mail() - :return: boolean based on whether email was sent. - """ - mail_struct = queue_mail_types[self.email_type] - presend = mail_struct['presend'](self) - mail = Mail( - mail_struct['template'], - subject=mail_struct['subject'], - categories=mail_struct.get('categories', None) - ) - self.data['osf_url'] = settings.DOMAIN - if presend and self.user.is_active and self.user.osf_mailing_lists.get(settings.OSF_HELP_LIST): - send_mail(self.to_addr or self.user.username, mail, mimetype='html', **(self.data or {})) - self.sent_at = timezone.now() - self.save() - return True - else: - self.__class__.remove_one(self) - return False - - def find_sent_of_same_type_and_user(self): - """ - Queries up for all emails of the same type as self, sent to the same user as self. - Does not look for queue-up emails. - :return: a list of those emails - """ - return self.__class__.find( - Q('email_type', 'eq', self.email_type) & - Q('user', 'eq', self.user) & - Q('sent_at', 'ne', None) - ) - -def queue_mail(to_addr, mail, send_at, user, **context): - """ - Queue an email to be sent using send_mail after a specified amount - of time and if the presend returns True. 
The presend is attached to - the template under mail. - - :param to_addr: the address email is to be sent to - :param mail: the type of mail. Struct following template: - { 'presend': function(), - 'template': mako template name, - 'subject': mail subject } - :param send_at: datetime object of when to send mail - :param user: user object attached to mail - :param context: IMPORTANT kwargs to be attached to template. - Sending mail will fail if needed for template kwargs are - not parameters. - :return: the QueuedMail object created - """ - new_mail = QueuedMail( - user=user, - to_addr=to_addr, - send_at=send_at, - email_type=mail['template'], - data=context - ) - new_mail.save() - return new_mail - - -# Predefined email templates. Structure: -#EMAIL_TYPE = { -# 'template': the mako template used for email_type, -# 'subject': subject used for the actual email, -# 'categories': categories to attach to the email using Sendgrid's SMTPAPI. -# 'presend': predicate function that determines whether an email should be sent. May also -# modify mail.data. -#} - -NO_ADDON = { - 'template': 'no_addon', - 'subject': 'Link an add-on to your OSF project', - 'presend': presends.no_addon, - 'categories': ['engagement', 'engagement-no-addon'] -} - -NO_LOGIN = { - 'template': 'no_login', - 'subject': 'What you\'re missing on the OSF', - 'presend': presends.no_login, - 'categories': ['engagement', 'engagement-no-login'] -} - -NEW_PUBLIC_PROJECT = { - 'template': 'new_public_project', - 'subject': 'Now, public. 
Next, impact.', - 'presend': presends.new_public_project, - 'categories': ['engagement', 'engagement-new-public-project'] -} - -WELCOME_OSF4M = { - 'template': 'welcome_osf4m', - 'subject': 'The benefits of sharing your presentation', - 'presend': presends.welcome_osf4m, - 'categories': ['engagement', 'engagement-welcome-osf4m'] -} - -NO_ADDON_TYPE = 'no_addon' -NO_LOGIN_TYPE = 'no_login' -NEW_PUBLIC_PROJECT_TYPE = 'new_public_project' -WELCOME_OSF4M_TYPE = 'welcome_osf4m' - -# Used to keep relationship from stored string 'email_type' to the predefined queued_email objects. -queue_mail_types = { - NO_ADDON_TYPE: NO_ADDON, - NO_LOGIN_TYPE: NO_LOGIN, - NEW_PUBLIC_PROJECT_TYPE: NEW_PUBLIC_PROJECT, - WELCOME_OSF4M_TYPE: WELCOME_OSF4M -} diff --git a/website/models.py b/website/models.py deleted file mode 100644 index 9ea29136354b..000000000000 --- a/website/models.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -"""Consolidates all necessary models from the framework and website packages. 
-""" - -from framework.auth.core import User -from framework.guid.model import Guid, BlacklistGuid -from framework.sessions.model import Session - -from website.project.model import ( - Node, NodeLog, - Tag, WatchConfig, MetaSchema, Pointer, - Comment, PrivateLink, MetaData, - AlternativeCitation, - DraftRegistration, - DraftRegistrationLog -) -from website.project.sanctions import ( - DraftRegistrationApproval, - Embargo, - EmbargoTerminationApproval, - RegistrationApproval, - Retraction, -) -from website.oauth.models import ApiOAuth2Application, ExternalAccount, ApiOAuth2PersonalToken -from website.identifiers.model import Identifier -from website.institutions.model import Institution # flake8: noqa - -from website.mails import QueuedMail -from website.files.models.base import FileVersion -from website.files.models.base import StoredFileNode -from website.files.models.base import TrashedFileNode -from website.conferences.model import Conference, MailRecord -from website.notifications.model import NotificationDigest -from website.notifications.model import NotificationSubscription -from website.archiver.model import ArchiveJob, ArchiveTarget -from website.project.licenses import NodeLicense, NodeLicenseRecord -from website.project.taxonomies import Subject -from website.preprints.model import PreprintService, PreprintProvider - -# All models -MODELS = ( - User, - ApiOAuth2Application, ApiOAuth2PersonalToken, Node, - NodeLog, StoredFileNode, TrashedFileNode, FileVersion, - Tag, WatchConfig, Session, Guid, MetaSchema, Pointer, - MailRecord, Comment, PrivateLink, MetaData, Conference, - NotificationSubscription, NotificationDigest, - ExternalAccount, Identifier, - Embargo, Retraction, RegistrationApproval, EmbargoTerminationApproval, - ArchiveJob, ArchiveTarget, BlacklistGuid, - QueuedMail, AlternativeCitation, - DraftRegistration, DraftRegistrationApproval, DraftRegistrationLog, - NodeLicense, NodeLicenseRecord, - Subject, PreprintProvider, PreprintService -) - 
-GUID_MODELS = (User, Node, Comment, MetaData) diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index 9e269a32c6ce..4f2b291b269d 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -16,8 +16,7 @@ register, Event, event_registry, RegistryError ) from website.notifications.events import utils as event_utils -from website.models import Node -from website.project.model import NodeLog +from osf.models import AbstractNode as Node, NodeLog from addons.base.signals import file_updated as signal diff --git a/website/notifications/model.py b/website/notifications/model.py deleted file mode 100644 index a3581ae46a62..000000000000 --- a/website/notifications/model.py +++ /dev/null @@ -1,69 +0,0 @@ -from modularodm import fields - -from framework.mongo import StoredObject, ObjectId -from modularodm.exceptions import ValidationValueError - -from website.project.model import Node -from website.notifications.constants import NOTIFICATION_TYPES - - -def validate_subscription_type(value): - if value not in NOTIFICATION_TYPES: - raise ValidationValueError - - -class NotificationSubscription(StoredObject): - _id = fields.StringField(primary=True) # pxyz_wiki_updated, uabc_comment_replies - - event_name = fields.StringField() # wiki_updated, comment_replies - owner = fields.AbstractForeignField() - - # Notification types - none = fields.ForeignField('user', list=True) - email_digest = fields.ForeignField('user', list=True) - email_transactional = fields.ForeignField('user', list=True) - - def add_user_to_subscription(self, user, notification_type, save=True): - for nt in NOTIFICATION_TYPES: - if user in getattr(self, nt): - if nt != notification_type: - getattr(self, nt).remove(user) - else: - if nt == notification_type: - getattr(self, nt).append(user) - - if notification_type != 'none' and isinstance(self.owner, Node) and self.owner.parent_node: - user_subs = 
self.owner.parent_node.child_node_subscriptions - if self.owner._id not in user_subs.setdefault(user._id, []): - user_subs[user._id].append(self.owner._id) - self.owner.parent_node.save() - - if save: - self.save() - - def remove_user_from_subscription(self, user, save=True): - for notification_type in NOTIFICATION_TYPES: - try: - getattr(self, notification_type, []).remove(user) - except ValueError: - pass - - if isinstance(self.owner, Node) and self.owner.parent_node: - try: - self.owner.parent_node.child_node_subscriptions.get(user._id, []).remove(self.owner._id) - self.owner.parent_node.save() - except ValueError: - pass - - if save: - self.save() - - -class NotificationDigest(StoredObject): - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - user_id = fields.StringField(index=True) - timestamp = fields.DateTimeField() - send_type = fields.StringField(index=True, validate=validate_subscription_type) - event = fields.StringField() - message = fields.StringField() - node_lineage = fields.StringField(list=True) diff --git a/website/notifications/views.py b/website/notifications/views.py index 55032a538028..1645bd484c9e 100644 --- a/website/notifications/views.py +++ b/website/notifications/views.py @@ -6,11 +6,10 @@ from framework.auth.decorators import must_be_logged_in from framework.exceptions import HTTPError +from osf.models import AbstractNode as Node, NotificationSubscription from website.notifications import utils from website.notifications.constants import NOTIFICATION_TYPES -from website.notifications.model import NotificationSubscription from website.project.decorators import must_be_valid_project -from website.project.model import Node @must_be_logged_in diff --git a/website/oauth/models/__init__.py b/website/oauth/models/__init__.py deleted file mode 100644 index 25a19a124dd2..000000000000 --- a/website/oauth/models/__init__.py +++ /dev/null @@ -1,655 +0,0 @@ -# -*- coding: utf-8 -*- - -import abc -import logging -import 
datetime -import functools -import httplib as http -import time -import urlparse -import uuid - -from flask import request -from django.utils import timezone -from oauthlib.oauth2.rfc6749.errors import MissingTokenError -from requests.exceptions import HTTPError as RequestsHTTPError - -from modularodm import fields, Q -from modularodm.validators import MaxLengthValidator, URLValidator -from requests_oauthlib import OAuth1Session -from requests_oauthlib import OAuth2Session - -from osf.exceptions import ValidationError - -from framework.auth import cas -from framework.encryption import EncryptedStringField -from framework.exceptions import HTTPError, PermissionsError -from framework.mongo import ObjectId, StoredObject -from framework.mongo.utils import unique_on -from framework.mongo.validators import string_required -from framework.sessions import session -from website import settings -from website.oauth.utils import PROVIDER_LOOKUP -from website.security import random_string -from website.util import web_url_for, api_v2_url - -logger = logging.getLogger(__name__) - -OAUTH1 = 1 -OAUTH2 = 2 - -generate_client_secret = functools.partial(random_string, length=40) - -@unique_on(['provider', 'provider_id']) -class ExternalAccount(StoredObject): - """An account on an external service. - - Note that this object is not and should not be aware of what other objects - are associated with it. This is by design, and this object should be kept as - thin as possible, containing only those fields that must be stored in the - database. - - The ``provider`` field is a de facto foreign key to an ``ExternalProvider`` - object, as providers are not stored in the database. - """ - _id = fields.StringField(default=lambda: str(ObjectId()), primary=True) - - # The OAuth credentials. One or both of these fields should be populated. 
- # For OAuth1, this is usually the "oauth_token" - # For OAuth2, this is usually the "access_token" - oauth_key = EncryptedStringField() - - # For OAuth1, this is usually the "oauth_token_secret" - # For OAuth2, this is not used - oauth_secret = EncryptedStringField() - - # Used for OAuth2 only - refresh_token = EncryptedStringField() - date_last_refreshed = fields.DateTimeField() - expires_at = fields.DateTimeField() - scopes = fields.StringField(list=True, default=lambda: list()) - - # The `name` of the service - # This lets us query for only accounts on a particular provider - provider = fields.StringField(required=True) - # The proper 'name' of the service - # Needed for account serialization - provider_name = fields.StringField(required=True) - - # The unique, persistent ID on the remote service. - provider_id = fields.StringField() - - # The user's name on the external service - display_name = EncryptedStringField() - # A link to the user's profile on the external service - profile_url = EncryptedStringField() - - def __repr__(self): - return ''.format(self.provider, - self.provider_id) - - -class ExternalProviderMeta(abc.ABCMeta): - """Keeps track of subclasses of the ``ExternalProvider`` object""" - - def __init__(cls, name, bases, dct): - super(ExternalProviderMeta, cls).__init__(name, bases, dct) - if not isinstance(cls.short_name, abc.abstractproperty): - PROVIDER_LOOKUP[cls.short_name] = cls - - -class ExternalProvider(object): - """A connection to an external service (ex: GitHub). - - This object contains no credentials, and is not saved in the database. - It provides an unauthenticated session with the provider, unless ``account`` - has been set - in which case, it provides a connection authenticated as the - ``ExternalAccount`` instance. - - Conceptually, this can be thought of as an extension of ``ExternalAccount``. 
- It's a separate object because this must be subclassed for each provider, - and ``ExternalAccount`` instances are stored within a single collection. - """ - - __metaclass__ = ExternalProviderMeta - - # Default to OAuth v2.0. - _oauth_version = OAUTH2 - - # Providers that have expiring tokens must override these - auto_refresh_url = None - refresh_time = 0 # When to refresh the oauth_key (seconds) - expiry_time = 0 # If/When the refresh token expires (seconds). 0 indicates a non-expiring refresh token - - def __init__(self, account=None): - super(ExternalProvider, self).__init__() - - # provide an unauthenticated session by default - self.account = account - - def __repr__(self): - return '<{name}: {status}>'.format( - name=self.__class__.__name__, - status=self.account.provider_id if self.account else 'anonymous' - ) - - @abc.abstractproperty - def auth_url_base(self): - """The base URL to begin the OAuth dance""" - pass - - @property - def auth_url(self): - """The URL to begin the OAuth dance. - - This property method has side effects - it at least adds temporary - information to the session so that callbacks can be associated with - the correct user. For OAuth1, it calls the provider to obtain - temporary credentials to start the flow. 
- """ - - # create a dict on the session object if it's not already there - if session.data.get('oauth_states') is None: - session.data['oauth_states'] = {} - - if self._oauth_version == OAUTH2: - # build the URL - oauth = OAuth2Session( - self.client_id, - redirect_uri=web_url_for('oauth_callback', - service_name=self.short_name, - _absolute=True), - scope=self.default_scopes, - ) - - url, state = oauth.authorization_url(self.auth_url_base) - - # save state token to the session for confirmation in the callback - session.data['oauth_states'][self.short_name] = {'state': state} - - elif self._oauth_version == OAUTH1: - # get a request token - oauth = OAuth1Session( - client_key=self.client_id, - client_secret=self.client_secret, - ) - - # request temporary credentials from the provider - response = oauth.fetch_request_token(self.request_token_url) - - # store them in the session for use in the callback - session.data['oauth_states'][self.short_name] = { - 'token': response.get('oauth_token'), - 'secret': response.get('oauth_token_secret'), - } - - url = oauth.authorization_url(self.auth_url_base) - - return url - - @abc.abstractproperty - def callback_url(self): - """The provider URL to exchange the code for a token""" - pass - - @abc.abstractproperty - def client_id(self): - """OAuth Client ID. a/k/a: Application ID""" - pass - - @abc.abstractproperty - def client_secret(self): - """OAuth Client Secret. a/k/a: Application Secret, Application Key""" - pass - - default_scopes = list() - - @abc.abstractproperty - def name(self): - """Human-readable name of the service. e.g.: ORCiD, GitHub""" - pass - - @abc.abstractproperty - def short_name(self): - """Name of the service to be used internally. e.g.: orcid, github""" - pass - - def auth_callback(self, user, **kwargs): - """Exchange temporary credentials for permanent credentials - - This is called in the view that handles the user once they are returned - to the OSF after authenticating on the external service. 
- """ - - if 'error' in request.args: - return False - - # make sure the user has temporary credentials for this provider - try: - cached_credentials = session.data['oauth_states'][self.short_name] - except KeyError: - raise PermissionsError('OAuth flow not recognized.') - - if self._oauth_version == OAUTH1: - request_token = request.args.get('oauth_token') - - # make sure this is the same user that started the flow - if cached_credentials.get('token') != request_token: - raise PermissionsError('Request token does not match') - - response = OAuth1Session( - client_key=self.client_id, - client_secret=self.client_secret, - resource_owner_key=cached_credentials.get('token'), - resource_owner_secret=cached_credentials.get('secret'), - verifier=request.args.get('oauth_verifier'), - ).fetch_access_token(self.callback_url) - - elif self._oauth_version == OAUTH2: - state = request.args.get('state') - - # make sure this is the same user that started the flow - if cached_credentials.get('state') != state: - raise PermissionsError('Request token does not match') - - try: - response = OAuth2Session( - self.client_id, - redirect_uri=web_url_for( - 'oauth_callback', - service_name=self.short_name, - _absolute=True - ), - ).fetch_token( - self.callback_url, - client_secret=self.client_secret, - code=request.args.get('code'), - ) - except (MissingTokenError, RequestsHTTPError): - raise HTTPError(http.SERVICE_UNAVAILABLE) - # pre-set as many values as possible for the ``ExternalAccount`` - info = self._default_handle_callback(response) - # call the hook for subclasses to parse values from the response - info.update(self.handle_callback(response)) - - return self._set_external_account(user, info) - - def _set_external_account(self, user, info): - try: - # create a new ``ExternalAccount`` ... - self.account = ExternalAccount( - provider=self.short_name, - provider_id=info['provider_id'], - provider_name=self.name, - ) - self.account.save() - except ValidationError: - # ... 
or get the old one - self.account = ExternalAccount.find_one( - Q('provider', 'eq', self.short_name) & - Q('provider_id', 'eq', info['provider_id']) - ) - assert self.account is not None - - # ensure that provider_name is correct - self.account.provider_name = self.name - # required - self.account.oauth_key = info['key'] - - # only for OAuth1 - self.account.oauth_secret = info.get('secret') - - # only for OAuth2 - self.account.expires_at = info.get('expires_at') - self.account.refresh_token = info.get('refresh_token') - self.account.date_last_refreshed = datetime.datetime.utcnow() - - # additional information - self.account.display_name = info.get('display_name') - self.account.profile_url = info.get('profile_url') - - self.account.save() - - # add it to the user's list of ``ExternalAccounts`` - if not user.external_accounts.filter(id=self.account.id).exists(): - user.external_accounts.add(self.account) - user.save() - - return True - - def _default_handle_callback(self, data): - """Parse as much out of the key exchange's response as possible. - - This should not be over-ridden in subclasses. - """ - if self._oauth_version == OAUTH1: - key = data.get('oauth_token') - secret = data.get('oauth_token_secret') - - values = {} - - if key: - values['key'] = key - if secret: - values['secret'] = secret - - return values - - elif self._oauth_version == OAUTH2: - key = data.get('access_token') - refresh_token = data.get('refresh_token') - expires_at = data.get('expires_at') - scopes = data.get('scope') - - values = {} - - if key: - values['key'] = key - if scopes: - values['scope'] = scopes - if refresh_token: - values['refresh_token'] = refresh_token - if expires_at: - values['expires_at'] = datetime.datetime.fromtimestamp( - float(expires_at) - ) - - return values - - @abc.abstractmethod - def handle_callback(self, response): - """Hook for allowing subclasses to parse information from the callback. 
- - Subclasses should implement this method to provide `provider_id` - and `profile_url`. - - Values provided by ``self._default_handle_callback`` can be over-ridden - here as well, in the unexpected case that they are parsed incorrectly - by default. - - :param response: The JSON returned by the provider during the exchange - :return dict: - """ - pass - - def refresh_oauth_key(self, force=False, extra={}, resp_auth_token_key='access_token', - resp_refresh_token_key='refresh_token', resp_expiry_fn=None): - """Handles the refreshing of an oauth_key for account associated with this provider. - Not all addons need to use this, as some do not have oauth_keys that expire. - - Subclasses must define the following for this functionality: - `auto_refresh_url` - URL to use when refreshing tokens. Must use HTTPS - `refresh_time` - Time (in seconds) that the oauth_key should be refreshed after. - Typically half the duration of validity. Cannot be 0. - - Providers may have different keywords in their response bodies, kwargs - `resp_*_key` allow subclasses to override these if necessary. - - kwarg `resp_expiry_fn` allows subclasses to specify a function that will return the - datetime-formatted oauth_key expiry key, given a successful refresh response from - `auto_refresh_url`. A default using 'expires_at' as a key is provided. 
- """ - # Ensure this is an authenticated Provider that uses token refreshing - if not (self.account and self.auto_refresh_url): - return False - - # Ensure this Provider is for a valid addon - if not (self.client_id and self.client_secret): - return False - - # Ensure a refresh is needed - if not (force or self._needs_refresh()): - return False - - if self.has_expired_credentials and not force: - return False - - resp_expiry_fn = resp_expiry_fn or (lambda x: datetime.datetime.utcfromtimestamp(time.time() + float(x['expires_in']))) - - client = OAuth2Session( - self.client_id, - token={ - 'access_token': self.account.oauth_key, - 'refresh_token': self.account.refresh_token, - 'token_type': 'Bearer', - 'expires_in': '-30', - } - ) - - extra.update({ - 'client_id': self.client_id, - 'client_secret': self.client_secret - }) - - token = client.refresh_token( - self.auto_refresh_url, - **extra - ) - self.account.oauth_key = token[resp_auth_token_key] - self.account.refresh_token = token[resp_refresh_token_key] - self.account.expires_at = resp_expiry_fn(token) - self.account.date_last_refreshed = datetime.datetime.utcnow() - self.account.save() - return True - - def _needs_refresh(self): - """Determines whether or not an associated ExternalAccount needs - a oauth_key. - - return bool: True if needs_refresh - """ - if self.refresh_time and self.account.expires_at: - return (self.account.expires_at - timezone.now()).total_seconds() < self.refresh_time - return False - - @property - def has_expired_credentials(self): - """Determines whether or not an associated ExternalAccount has - expired credentials that can no longer be renewed - - return bool: True if cannot be refreshed - """ - if self.expiry_time and self.account.expires_at: - return (timezone.now() - self.account.expires_at).total_seconds() > self.expiry_time - return False - - -class ApiOAuth2Scope(StoredObject): - """ - Store information about recognized OAuth2 scopes. 
Only scopes registered under this database model can - be requested by third parties. - """ - _id = fields.StringField(primary=True, - default=lambda: str(ObjectId())) - name = fields.StringField(unique=True, required=True, index=True) - description = fields.StringField(required=True) - is_active = fields.BooleanField(default=True, index=True) # TODO: Add mechanism to deactivate a scope? - - -class ApiOAuth2Application(StoredObject): - """Registration and key for user-created OAuth API applications - - This collection is also used by CAS to create the master list of available applications. - Any changes made to field names in this model must be echoed in the CAS implementation. - """ - _id = fields.StringField( - primary=True, - default=lambda: str(ObjectId()) - ) - - # Client ID and secret. Use separate ID field so ID format doesn't have to be restricted to database internals. - client_id = fields.StringField(default=lambda: uuid.uuid4().hex, # Not *guaranteed* unique, but very unlikely - unique=True, - index=True) - client_secret = fields.StringField(default=generate_client_secret) - - is_active = fields.BooleanField(default=True, # Set to False if application is deactivated - index=True) - - owner = fields.ForeignField('User', - index=True, - required=True) - - # User-specified application descriptors - name = fields.StringField(index=True, required=True, validate=[string_required, MaxLengthValidator(200)]) - description = fields.StringField(required=False, validate=MaxLengthValidator(1000)) - - date_created = fields.DateTimeField(auto_now_add=True, - editable=False) - - home_url = fields.StringField(required=True, - validate=URLValidator()) - callback_url = fields.StringField(required=True, - validate=URLValidator()) - - def deactivate(self, save=False): - """ - Deactivate an ApiOAuth2Application - - Does not delete the database record, but revokes all tokens and sets a flag that hides this instance from API - """ - client = cas.get_client() - # Will raise a 
CasHttpError if deletion fails, which will also stop setting of active=False. - resp = client.revoke_application_tokens(self.client_id, self.client_secret) # noqa - - self.is_active = False - - if save: - self.save() - return True - - def reset_secret(self, save=False): - """ - Reset the secret of an ApiOAuth2Application - Revokes all tokens - """ - client = cas.get_client() - client.revoke_application_tokens(self.client_id, self.client_secret) - self.client_secret = generate_client_secret() - - if save: - self.save() - return True - - @property - def url(self): - return '/settings/applications/{}/'.format(self.client_id) - - @property - def absolute_url(self): - return urlparse.urljoin(settings.DOMAIN, self.url) - - # Properties used by Django and DRF "Links: self" field - @property - def absolute_api_v2_url(self): - path = '/applications/{}/'.format(self.client_id) - return api_v2_url(path) - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - -class ApiOAuth2PersonalToken(StoredObject): - """Information for user-created personal access tokens - - This collection is also used by CAS to create the master list of available tokens. - Any changes made to field names in this model must be echoed in the CAS implementation. - """ - _id = fields.StringField(primary=True, - default=lambda: str(ObjectId())) - - # Name of the field being `token_id` is a CAS requirement. - # This is the actual value of the token that's used to authenticate - token_id = fields.StringField(default=functools.partial(random_string, length=70), - unique=True) - - owner = fields.ForeignField('User', - index=True, - required=True) - - name = fields.StringField(required=True, index=True) - - # This field is a space delimited list of scopes, e.g. 
"osf.full_read osf.full_write" - scopes = fields.StringField(required=True) - - is_active = fields.BooleanField(default=True, index=True) - - def deactivate(self, save=False): - """ - Deactivate an ApiOAuth2PersonalToken - - Does not delete the database record, but hides this instance from API - """ - client = cas.get_client() - # Will raise a CasHttpError if deletion fails for any reason other than the token - # not yet being created. This will also stop setting of active=False. - try: - resp = client.revoke_tokens({'token': self.token_id}) # noqa - except cas.CasHTTPError as e: - if e.code == 400: - pass # Token hasn't been used yet, so not created in cas - else: - raise e - - self.is_active = False - - if save: - self.save() - return True - - @property - def url(self): - return '/settings/tokens/{}/'.format(self._id) - - @property - def absolute_url(self): - return urlparse.urljoin(settings.DOMAIN, self.url) - - # Properties used by Django and DRF "Links: self" field - @property - def absolute_api_v2_url(self): - path = '/tokens/{}/'.format(self._id) - return api_v2_url(path) - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - - -class BasicAuthProviderMixin(object): - """ - Providers utilizing BasicAuth can utilize this class to implement the - storage providers framework by subclassing this mixin. This provides - a translation between the oauth parameters and the BasicAuth parameters. - - The password here is kept decrypted by default. 
- """ - - def __init__(self, account=None, host=None, username=None, password=None): - super(BasicAuthProviderMixin, self).__init__() - if account: - self.account = account - elif not account and host and password and username: - self.account = ExternalAccount( - display_name=username, - oauth_key=password, - oauth_secret=host.lower(), - provider_id='{}:{}'.format(host.lower(), username), - profile_url=host.lower(), - provider=self.short_name, - provider_name=self.name - ) - else: - self.account = None - - @property - def host(self): - return self.account.profile_url - - @property - def username(self): - return self.account.display_name - - @property - def password(self): - return self.account.oauth_key diff --git a/website/oauth/views.py b/website/oauth/views.py index 78723daa9d8e..de6a023bb94c 100644 --- a/website/oauth/views.py +++ b/website/oauth/views.py @@ -6,7 +6,7 @@ from framework.auth.decorators import must_be_logged_in from framework.exceptions import HTTPError -from website.oauth.models import ExternalAccount +from osf.models import ExternalAccount from website.oauth.utils import get_service from website.oauth.signals import oauth_complete diff --git a/website/preprints/model.py b/website/preprints/model.py deleted file mode 100644 index 1644abac2082..000000000000 --- a/website/preprints/model.py +++ /dev/null @@ -1,262 +0,0 @@ -import datetime -import urlparse - -from modularodm import fields, Q - -from framework.encryption import EncryptedStringField -from framework.celery_tasks.handlers import enqueue_task -from framework.exceptions import PermissionsError -from framework.guid.model import GuidStoredObject -from framework.mongo import ObjectId, StoredObject -from framework.mongo.utils import unique_on -from website.preprints.tasks import on_preprint_updated -from website.project.model import NodeLog -from website.project.licenses import set_license -from website.project.taxonomies import Subject, validate_subject_hierarchy -from website.util import 
api_v2_url -from website.util.permissions import ADMIN -from website import settings - -@unique_on(['node', 'provider']) -class PreprintService(GuidStoredObject): - - _id = fields.StringField(primary=True) - date_created = fields.DateTimeField(auto_now_add=True) - date_modified = fields.DateTimeField(auto_now=True) - provider = fields.ForeignField('PreprintProvider', index=True) - node = fields.ForeignField('Node', index=True) - is_published = fields.BooleanField(default=False, index=True) - date_published = fields.DateTimeField() - license = fields.ForeignField('NodeLicenseRecord') - domain = fields.StringField() - - # This is a list of tuples of Subject id's. MODM doesn't do schema - # validation for DictionaryFields, but would unsuccessfully attempt - # to validate the schema for a list of lists of ForeignFields. - # - # Format: [[root_subject._id, ..., child_subject._id], ...] - subjects = fields.DictionaryField(list=True) - - @property - def primary_file(self): - if not self.node: - return - return self.node.preprint_file - - @property - def article_doi(self): - if not self.node: - return - return self.node.preprint_article_doi - - @property - def is_preprint_orphan(self): - if not self.node: - return - return self.node.is_preprint_orphan - - @property - def deep_url(self): - # Required for GUID routing - return '/preprints/{}/'.format(self._primary_key) - - @property - def url(self): - if self.provider.domain_redirect_enabled or self.provider._id == 'osf': - return '/{}/'.format(self._id) - - return '/preprints/{}/{}/'.format(self.provider._id, self._id) - - @property - def absolute_url(self): - return urlparse.urljoin( - self.provider.domain if self.provider.domain_redirect_enabled else settings.DOMAIN, - self.url - ) - - @property - def absolute_api_v2_url(self): - path = '/preprints/{}/'.format(self._id) - return api_v2_url(path) - - def has_permission(self, *args, **kwargs): - return self.node.has_permission(*args, **kwargs) - - def get_subjects(self): - 
ret = [] - for subj_list in self.subjects: - subj_hierarchy = [] - for subj_id in subj_list: - subj = Subject.load(subj_id) - if subj: - subj_hierarchy += ({'id': subj_id, 'text': subj.text}, ) - if subj_hierarchy: - ret.append(subj_hierarchy) - return ret - - def set_subjects(self, preprint_subjects, auth, save=False): - if not self.node.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can change a preprint\'s subjects.') - - self.subjects = [] - for subj_list in preprint_subjects: - subj_hierarchy = [] - for s in subj_list: - subj_hierarchy.append(s) - if subj_hierarchy: - validate_subject_hierarchy(subj_hierarchy) - self.subjects.append(subj_hierarchy) - - if save: - self.save() - - def set_primary_file(self, preprint_file, auth, save=False): - if not self.node.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can change a preprint\'s primary file.') - - if preprint_file.node != self.node or preprint_file.provider != 'osfstorage': - raise ValueError('This file is not a valid primary file for this preprint.') - - existing_file = self.node.preprint_file - self.node.preprint_file = preprint_file - - # only log if updating the preprint file, not adding for the first time - if existing_file: - self.node.add_log( - action=NodeLog.PREPRINT_FILE_UPDATED, - params={ - 'preprint': self._id - }, - auth=auth, - save=False - ) - - if save: - self.save() - self.node.save() - - def set_published(self, published, auth, save=False): - if not self.node.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can publish a preprint.') - - if self.is_published and not published: - raise ValueError('Cannot unpublish preprint.') - - self.is_published = published - - if published: - if not (self.node.preprint_file and self.node.preprint_file.node == self.node): - raise ValueError('Preprint node is not a valid preprint; cannot publish.') - if not self.provider: - raise ValueError('Preprint provider not specified; cannot 
publish.') - if not self.subjects: - raise ValueError('Preprint must have at least one subject to be published.') - self.date_published = datetime.datetime.utcnow() - self.node._has_abandoned_preprint = False - - self.node.add_log( - action=NodeLog.PREPRINT_INITIATED, - params={ - 'preprint': self._id - }, - auth=auth, - save=False, - ) - - if not self.node.is_public: - self.node.set_privacy( - self.node.PUBLIC, - auth=None, - log=True - ) - - if save: - self.node.save() - self.save() - - def set_preprint_license(self, license_detail, auth, save=False): - - license_record, license_changed = set_license(self, license_detail, auth, node_type='preprint') - - if license_changed: - self.node.add_log( - action=NodeLog.PREPRINT_LICENSE_UPDATED, - params={ - 'preprint': self._id, - 'new_license': license_record.node_license.name - }, - auth=auth, - save=False - ) - - if save: - self.save() - - def save(self, *args, **kwargs): - saved_fields = super(PreprintService, self).save(*args, **kwargs) - if saved_fields: - enqueue_task(on_preprint_updated.s(self._id)) - - -class PreprintProvider(StoredObject): - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - name = fields.StringField(required=True) - logo_name = fields.StringField() - header_text = fields.StringField() - description = fields.StringField() - domain = fields.StringField() - banner_name = fields.StringField() - external_url = fields.StringField() - email_contact = fields.StringField() - email_support = fields.StringField() - example = fields.StringField() - access_token = EncryptedStringField() - advisory_board = fields.StringField() - social_twitter = fields.StringField() - social_facebook = fields.StringField() - social_instagram = fields.StringField() - subjects_acceptable = fields.DictionaryField(list=True, default=lambda: []) - licenses_acceptable = fields.ForeignField('NodeLicense', list=True, default=lambda: []) - - @property - def top_level_subjects(self): - if 
len(self.subjects_acceptable) == 0: - return Subject.find(Q('parents', 'eq', [])) - tops = set([sub[0][0] for sub in self.subjects_acceptable]) - return [Subject.load(sub) for sub in tops] - - @property - def all_subjects(self): - q = [] - for rule in self.subjects_acceptable: - if rule[1]: - q.append(Q('parents', 'eq', Subject.load(rule[0][-1]))) - if len(rule[0]) == 1: - potential_parents = Subject.find(Q('parents', 'eq', Subject.load(rule[0][-1]))) - for parent in potential_parents: - q.append(Q('parents', 'eq', parent)) - for sub in rule[0]: - q.append(Q('_id', 'eq', sub)) - return Subject.find(reduce(lambda x, y: x | y, q)) if len(q) > 1 else (Subject.find(q[0]) if len(q) else Subject.find()) - - def get_absolute_url(self): - return '{}preprint_providers/{}'.format(self.absolute_api_v2_url, self._id) - - @property - def absolute_api_v2_url(self): - path = '/preprint_providers/{}/'.format(self._id) - return api_v2_url(path) - - @property - def logo_path(self): - if self.logo_name: - return '/static/img/preprint_providers/{}'.format(self.logo_name) - else: - return None - - @property - def banner_path(self): - if self.logo_name: - return '/static/img/preprint_providers/{}'.format(self.logo_name) - else: - return None diff --git a/website/prereg/utils.py b/website/prereg/utils.py index 727ac9499497..dad06d7a1a20 100644 --- a/website/prereg/utils.py +++ b/website/prereg/utils.py @@ -8,21 +8,21 @@ def drafts_for_user(user, campaign): - from website import models # noqa + from osf.models import DraftRegistration, Node PREREG_CHALLENGE_METASCHEMA = get_prereg_schema(campaign) - return models.DraftRegistration.objects.filter( + return DraftRegistration.objects.filter( registration_schema=PREREG_CHALLENGE_METASCHEMA, approval=None, registered_node=None, - branched_from__in=models.Node.objects.filter( + branched_from__in=Node.objects.filter( is_deleted=False, contributor__admin=True, contributor__user=user).values_list('id', flat=True)) def 
get_prereg_schema(campaign='prereg'): - from website.models import MetaSchema # noqa + from osf.models import MetaSchema if campaign not in PREREG_CAMPAIGNS: raise ValueError('campaign must be one of: {}'.format(', '.join(PREREG_CAMPAIGNS.keys()))) schema_name = PREREG_CAMPAIGNS[campaign] diff --git a/website/profile/views.py b/website/profile/views.py index 9abf75d266e4..3c9d66b2df8c 100644 --- a/website/profile/views.py +++ b/website/profile/views.py @@ -10,7 +10,7 @@ import mailchimp from modularodm.exceptions import ValidationError, NoResultsFound, MultipleResultsFound from modularodm import Q -from osf.models import Node, NodeRelation +from osf.models import Node, NodeRelation, OSFUser as User from framework import sentry from framework.auth import Auth @@ -25,11 +25,11 @@ from framework.flask import redirect # VOL-aware redirect from framework.status import push_status_message +from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken from website import mails from website import mailchimp_utils from website import settings from website.project.utils import PROJECT_QUERY -from website.models import ApiOAuth2Application, ApiOAuth2PersonalToken, User from website.oauth.utils import get_available_scopes from website.profile import utils as profile_utils from website.util.time import throttle_period_expired diff --git a/website/project/__init__.py b/website/project/__init__.py index a535b31a01c7..eb81151c0190 100644 --- a/website/project/__init__.py +++ b/website/project/__init__.py @@ -3,7 +3,6 @@ from django.apps import apps -from .model import PrivateLink from framework.auth.core import Auth from framework.mongo.utils import from_mongo from modularodm import Q @@ -103,6 +102,7 @@ def new_private_link(name, user, nodes, anonymous): :return PrivateLink: Created private link """ + PrivateLink = apps.get_model('osf.PrivateLink') NodeLog = apps.get_model('osf.NodeLog') key = str(uuid.uuid4()).replace('-', '') diff --git a/website/project/decorators.py 
b/website/project/decorators.py index b7979839bf15..3cf3ee2d64f3 100644 --- a/website/project/decorators.py +++ b/website/project/decorators.py @@ -15,7 +15,7 @@ from framework.auth.decorators import collect_auth from framework.mongo.utils import get_or_http_error -from website.models import Node +from osf.models import AbstractNode as Node from website import settings _load_node_or_fail = lambda pk: get_or_http_error(Node, pk) @@ -225,7 +225,7 @@ def wrapped(*args, **kwargs): kwargs['auth'].private_key = key link_anon = None if not include_view_only_anon: - from website.models import PrivateLink + from osf.models import PrivateLink try: link_anon = PrivateLink.find_one(Q('key', 'eq', key)).anonymous except ModularOdmException: diff --git a/website/project/licenses/__init__.py b/website/project/licenses/__init__.py index 5a701a7ed10c..1d6c570459bb 100644 --- a/website/project/licenses/__init__.py +++ b/website/project/licenses/__init__.py @@ -2,104 +2,25 @@ import os import warnings -from modularodm import fields, Q +from django.apps import apps +from modularodm import Q from osf.exceptions import ValidationError from modularodm import exceptions as modm_exceptions from framework import exceptions as framework_exceptions -from framework.mongo import ( - ObjectId, - StoredObject, - utils as mongo_utils -) from website import exceptions as web_exceptions from website import settings from website.util import permissions -def _serialize(fields, instance): - return { - field: getattr(instance, field) - for field in fields - } - -def serialize_node_license(node_license): - return { - 'id': node_license.license_id, - 'name': node_license.name, - 'text': node_license.text, - } - -def serialize_node_license_record(node_license_record): - if node_license_record is None: - return {} - ret = serialize_node_license(node_license_record.node_license) - ret.update(_serialize(('year', 'copyright_holders'), node_license_record)) - return ret - - -@mongo_utils.unique_on(['id']) 
-@mongo_utils.unique_on(['name']) -class NodeLicense(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - - id = fields.StringField( - required=True, - unique=False, # Skip modular-odm's uniqueness implementation, depending on MongoDB's - # instead (the decorator will install the proper index), so that we can - # kludge a non-racey upsert in ensure_licenses. - editable=False - ) - name = fields.StringField( - required=True, - unique=False # Ditto. - ) - text = fields.StringField(required=True) - properties = fields.StringField(list=True) - - -class NodeLicenseRecord(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - - node_license = fields.ForeignField('nodelicense', required=True) - # Deliberately left as a StringField to support year ranges (e.g. 2012-2015) - year = fields.StringField() - copyright_holders = fields.StringField(list=True) - - @property - def name(self): - return self.node_license.name if self.node_license else None - - @property - def text(self): - return self.node_license.text if self.node_license else None - - @property - def id(self): - return self.node_license.id if self.node_license else None - - def to_json(self): - return serialize_node_license_record(self) - - def copy(self): - copied = NodeLicenseRecord( - node_license=self.node_license, - year=self.year, - copyright_holders=self.copyright_holders - ) - copied.save() - return copied - - def ensure_licenses(warn=True): """Upsert the licenses in our database based on a JSON file. 
:return tuple: (number inserted, number updated) """ + NodeLicense = apps.get_model('osf.NodeLicense') ninserted = 0 nupdated = 0 with open( @@ -148,6 +69,8 @@ def ensure_licenses(warn=True): def set_license(node, license_detail, auth, node_type='node'): + NodeLicense = apps.get_model('osf.NodeLicense') + NodeLicenseRecord = apps.get_model('osf.NodeLicenseRecord') if node_type not in ['node', 'preprint']: raise ValueError('{} is not a valid node_type argument'.format(node_type)) diff --git a/website/project/model.py b/website/project/model.py index 6519e49869fb..9af6f78769ca 100644 --- a/website/project/model.py +++ b/website/project/model.py @@ -1,68 +1,16 @@ # -*- coding: utf-8 -*- -import datetime -import functools -import itertools import logging -import os import re -import urlparse -import warnings -import jsonschema -import pymongo -import pytz from django.apps import apps from django.core.exceptions import ValidationError -from django.core.urlresolvers import reverse -from django.core.validators import URLValidator -from django.utils import timezone from modularodm import Q -from modularodm import fields -from modularodm.validators import MaxLengthValidator from modularodm.exceptions import KeyExistsException, ValidationValueError -from framework import status -from framework.mongo import ObjectId, DummyRequest -from framework.mongo import StoredObject -from framework.mongo import validators -from framework.mongo import get_request_and_user_id -from framework.addons import AddonModelMixin -from framework.analytics import (get_basic_counters, - increment_user_activity_counters) -from framework.auth import Auth, User, get_user -from framework.auth.utils import privacy_info_handle -from framework.celery_tasks.handlers import enqueue_task -from framework.exceptions import PermissionsError -from framework.guid.model import Guid, GuidStoredObject -from framework.mongo.utils import to_mongo_key, unique_on -from framework.sentry import log_exception -from 
framework.transactions.context import TokuTransaction -from framework.utils import iso8601format - -from keen import scoped_keys -from website import language, settings -from website.citations.utils import datetime_to_csl -from website.exceptions import (InvalidTagError, NodeStateError, - TagNotFoundError, UserNotAffiliatedError) -from website.identifiers.model import IdentifierMixin -from website.institutions.model import AffiliatedInstitutionsList, Institution -from website.mails import mails -from website.project import signals as project_signals -from website.project import tasks as node_tasks -from website.project.commentable import Commentable +from website import settings from website.project.metadata.schemas import OSF_META_SCHEMAS -from website.project.metadata.utils import create_jsonschema_from_metaschema -from website.project.sanctions import (DraftRegistrationApproval, Embargo, - EmbargoTerminationApproval, - RegistrationApproval, Retraction) -from website.project.licenses import set_license -from website.project.spam.model import SpamMixin -from website.util import (api_url_for, api_v2_url, get_headers_from_request, - sanitize, web_url_for) -from website.util.permissions import (ADMIN, CREATOR_PERMISSIONS, - DEFAULT_CONTRIBUTOR_PERMISSIONS, - expand_permissions, reduce_permissions) +from website.util import sanitize logger = logging.getLogger(__name__) @@ -78,63 +26,6 @@ def has_anonymous_link(node, auth): return auth.private_link.anonymous return False -@unique_on(['name', 'schema_version']) -class MetaSchema(StoredObject): - - _id = fields.StringField(default=lambda: str(ObjectId())) - name = fields.StringField() - schema = fields.DictionaryField() - category = fields.StringField() - - # Version of the schema to use (e.g. 
if questions, responses change) - schema_version = fields.IntegerField() - - @property - def _config(self): - return self.schema.get('config', {}) - - @property - def requires_approval(self): - return self._config.get('requiresApproval', False) - - @property - def fulfills(self): - return self._config.get('fulfills', []) - - @property - def messages(self): - return self._config.get('messages', {}) - - @property - def requires_consent(self): - return self._config.get('requiresConsent', False) - - @property - def has_files(self): - return self._config.get('hasFiles', False) - - @property - def absolute_api_v2_url(self): - path = '/metaschemas/{}/'.format(self._id) - return api_v2_url(path) - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - - def validate_metadata(self, metadata, reviewer=False, required_fields=False): - """ - Validates registration_metadata field. - """ - schema = create_jsonschema_from_metaschema(self.schema, required_fields=required_fields, is_reviewer=reviewer) - try: - jsonschema.validate(metadata, schema) - except jsonschema.ValidationError as e: - raise ValidationValueError(e.message) - except jsonschema.SchemaError as e: - raise ValidationValueError(e.message) - return - def ensure_schema(schema, name, version=1): MetaSchema = apps.get_model('osf.MetaSchema') @@ -160,19 +51,9 @@ def ensure_schemas(): ensure_schema(schema, schema['name'], version=schema.get('version', 1)) -class MetaData(GuidStoredObject): - # TODO: This model may be unused; potential candidate for deprecation depending on contents of production database - _id = fields.StringField(primary=True) - - target = fields.AbstractForeignField() - data = fields.DictionaryField() - - date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow) - date_modified = fields.DateTimeField(auto_now=datetime.datetime.utcnow) - - def validate_contributor(guid, contributors): - user = User.load(guid) + OSFUser = apps.get_model('osf.OSFUser') 
+ user = OSFUser.load(guid) if not user or not user.is_claimed: raise ValidationValueError('User does not exist or is not active.') elif user not in contributors: @@ -194,508 +75,6 @@ def get_valid_mentioned_users_guids(comment, contributors): ] return new_mentions -class Comment(GuidStoredObject, SpamMixin, Commentable): - - __guid_min_length__ = 12 - - OVERVIEW = 'node' - FILES = 'files' - WIKI = 'wiki' - - _id = fields.StringField(primary=True) - - user = fields.ForeignField('user', required=True) - # the node that the comment belongs to - node = fields.ForeignField('node', required=True) - # the direct 'parent' of the comment (e.g. the target of a comment reply is another comment) - target = fields.AbstractForeignField(required=True) - # The file or project overview page that the comment is for - root_target = fields.AbstractForeignField() - - date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow) - date_modified = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow, editable=True) - modified = fields.BooleanField(default=False) - is_deleted = fields.BooleanField(default=False) - # The type of root_target: node/files - page = fields.StringField() - content = fields.StringField(required=True, - validate=[validators.comment_maxlength(settings.COMMENT_MAXLENGTH), validators.string_required]) - # The mentioned users - ever_mentioned = fields.ListField(fields.StringField()) - - # For Django compatibility - @property - def pk(self): - return self._id - - @property - def url(self): - return '/{}/'.format(self._id) - - @property - def absolute_api_v2_url(self): - path = '/comments/{}/'.format(self._id) - return api_v2_url(path) - - @property - def target_type(self): - """The object "type" used in the OSF v2 API.""" - return 'comments' - - @property - def root_target_page(self): - """The page type associated with the object/Comment.root_target.""" - return None - - def belongs_to_node(self, node_id): - """Check whether the comment is attached 
to the specified node.""" - return self.node._id == node_id - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - - def get_comment_page_url(self): - if isinstance(self.root_target.referent, Node): - return self.node.absolute_url - return settings.DOMAIN + str(self.root_target._id) + '/' - - def get_content(self, auth): - """ Returns the comment content if the user is allowed to see it. Deleted comments - can only be viewed by the user who created the comment.""" - if not auth and not self.node.is_public: - raise PermissionsError - - if self.is_deleted and ((not auth or auth.user.is_anonymous) - or (auth and not auth.user.is_anonymous and self.user._id != auth.user._id)): - return None - - return self.content - - def get_comment_page_title(self): - if self.page == Comment.FILES: - return self.root_target.referent.name - elif self.page == Comment.WIKI: - return self.root_target.referent.page_name - return '' - - def get_comment_page_type(self): - if self.page == Comment.FILES: - return 'file' - elif self.page == Comment.WIKI: - return 'wiki' - return self.node.project_or_component - - @classmethod - def find_n_unread(cls, user, node, page, root_id=None): - if node.is_contributor(user): - if page == Comment.OVERVIEW: - view_timestamp = user.get_node_comment_timestamps(target_id=node._id) - root_target = Guid.load(node._id) - elif page == Comment.FILES or page == Comment.WIKI: - view_timestamp = user.get_node_comment_timestamps(target_id=root_id) - root_target = Guid.load(root_id) - else: - raise ValueError('Invalid page') - return Comment.find(Q('node', 'eq', node) & - Q('user', 'ne', user) & - Q('is_deleted', 'eq', False) & - (Q('date_created', 'gt', view_timestamp) | - Q('date_modified', 'gt', view_timestamp)) & - Q('root_target', 'eq', root_target)).count() - - return 0 - - @classmethod - def create(cls, auth, **kwargs): - comment = cls(**kwargs) - if not comment.node.can_comment(auth): - raise PermissionsError('{0!r} does 
not have permission to comment on this node'.format(auth.user)) - log_dict = { - 'project': comment.node.parent_id, - 'node': comment.node._id, - 'user': comment.user._id, - 'comment': comment._id, - } - if isinstance(comment.target.referent, Comment): - comment.root_target = comment.target.referent.root_target - else: - comment.root_target = comment.target - - page = getattr(comment.root_target.referent, 'root_target_page', None) - if not page: - raise ValueError('Invalid root target.') - comment.page = page - - log_dict.update(comment.root_target.referent.get_extra_log_params(comment)) - - if comment.content: - new_mentions = get_valid_mentioned_users_guids(comment, comment.node.contributors) - if new_mentions: - project_signals.mention_added.send(comment, new_mentions=new_mentions, auth=auth) - comment.ever_mentioned.extend(new_mentions) - - comment.save() - - comment.node.add_log( - NodeLog.COMMENT_ADDED, - log_dict, - auth=auth, - save=False, - ) - - comment.node.save() - project_signals.comment_added.send(comment, auth=auth) - - return comment - - def edit(self, content, auth, save=False): - if not self.node.can_comment(auth) or self.user._id != auth.user._id: - raise PermissionsError('{0!r} does not have permission to edit this comment'.format(auth.user)) - log_dict = { - 'project': self.node.parent_id, - 'node': self.node._id, - 'user': self.user._id, - 'comment': self._id, - } - log_dict.update(self.root_target.referent.get_extra_log_params(self)) - self.content = content - self.modified = True - self.date_modified = timezone.now() - new_mentions = get_valid_mentioned_users_guids(self, self.node.contributors) - - if save: - if new_mentions: - project_signals.mention_added.send(self, new_mentions=new_mentions, auth=auth) - self.ever_mentioned.extend(new_mentions) - self.save() - self.node.add_log( - NodeLog.COMMENT_UPDATED, - log_dict, - auth=auth, - save=False, - ) - self.node.save() - - def delete(self, auth, save=False): - if not 
self.node.can_comment(auth) or self.user._id != auth.user._id: - raise PermissionsError('{0!r} does not have permission to comment on this node'.format(auth.user)) - log_dict = { - 'project': self.node.parent_id, - 'node': self.node._id, - 'user': self.user._id, - 'comment': self._id, - } - self.is_deleted = True - log_dict.update(self.root_target.referent.get_extra_log_params(self)) - self.date_modified = timezone.now() - if save: - self.save() - self.node.add_log( - NodeLog.COMMENT_REMOVED, - log_dict, - auth=auth, - save=False, - ) - self.node.save() - - def undelete(self, auth, save=False): - if not self.node.can_comment(auth) or self.user._id != auth.user._id: - raise PermissionsError('{0!r} does not have permission to comment on this node'.format(auth.user)) - self.is_deleted = False - log_dict = { - 'project': self.node.parent_id, - 'node': self.node._id, - 'user': self.user._id, - 'comment': self._id, - } - log_dict.update(self.root_target.referent.get_extra_log_params(self)) - self.date_modified = timezone.now() - if save: - self.save() - self.node.add_log( - NodeLog.COMMENT_RESTORED, - log_dict, - auth=auth, - save=False, - ) - self.node.save() - - -@unique_on(['params.node', '_id']) -class NodeLog(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - __indices__ = [{ - 'key_or_list': [ - ('user', 1), - ('node', 1) - ], - }, { - 'key_or_list': [ - ('node', 1), - ('should_hide', 1), - ('date', -1) - ] - }] - - date = fields.DateTimeField(default=datetime.datetime.utcnow, index=True) - action = fields.StringField(index=True) - params = fields.DictionaryField() - should_hide = fields.BooleanField(default=False) - original_node = fields.ForeignField('node', index=True) - node = fields.ForeignField('node', index=True) - - was_connected_to = fields.ForeignField('node', list=True) - - user = fields.ForeignField('user', index=True) - foreign_user = fields.StringField() - - DATE_FORMAT = '%m/%d/%Y %H:%M UTC' - - # Log action 
constants -- NOTE: templates stored in log_templates.mako - CREATED_FROM = 'created_from' - - PROJECT_CREATED = 'project_created' - PROJECT_REGISTERED = 'project_registered' - PROJECT_DELETED = 'project_deleted' - - NODE_CREATED = 'node_created' - NODE_FORKED = 'node_forked' - NODE_REMOVED = 'node_removed' - - POINTER_CREATED = 'pointer_created' - POINTER_FORKED = 'pointer_forked' - POINTER_REMOVED = 'pointer_removed' - - WIKI_UPDATED = 'wiki_updated' - WIKI_DELETED = 'wiki_deleted' - WIKI_RENAMED = 'wiki_renamed' - - MADE_WIKI_PUBLIC = 'made_wiki_public' - MADE_WIKI_PRIVATE = 'made_wiki_private' - - CONTRIB_ADDED = 'contributor_added' - CONTRIB_REMOVED = 'contributor_removed' - CONTRIB_REORDERED = 'contributors_reordered' - - CHECKED_IN = 'checked_in' - CHECKED_OUT = 'checked_out' - - PERMISSIONS_UPDATED = 'permissions_updated' - - MADE_PRIVATE = 'made_private' - MADE_PUBLIC = 'made_public' - - TAG_ADDED = 'tag_added' - TAG_REMOVED = 'tag_removed' - - FILE_TAG_ADDED = 'file_tag_added' - FILE_TAG_REMOVED = 'file_tag_removed' - - EDITED_TITLE = 'edit_title' - EDITED_DESCRIPTION = 'edit_description' - CHANGED_LICENSE = 'license_changed' - - UPDATED_FIELDS = 'updated_fields' - - FILE_MOVED = 'addon_file_moved' - FILE_COPIED = 'addon_file_copied' - FILE_RENAMED = 'addon_file_renamed' - - FOLDER_CREATED = 'folder_created' - - FILE_ADDED = 'file_added' - FILE_UPDATED = 'file_updated' - FILE_REMOVED = 'file_removed' - FILE_RESTORED = 'file_restored' - - ADDON_ADDED = 'addon_added' - ADDON_REMOVED = 'addon_removed' - COMMENT_ADDED = 'comment_added' - COMMENT_REMOVED = 'comment_removed' - COMMENT_UPDATED = 'comment_updated' - COMMENT_RESTORED = 'comment_restored' - - CITATION_ADDED = 'citation_added' - CITATION_EDITED = 'citation_edited' - CITATION_REMOVED = 'citation_removed' - - MADE_CONTRIBUTOR_VISIBLE = 'made_contributor_visible' - MADE_CONTRIBUTOR_INVISIBLE = 'made_contributor_invisible' - - EXTERNAL_IDS_ADDED = 'external_ids_added' - - EMBARGO_APPROVED = 
'embargo_approved' - EMBARGO_CANCELLED = 'embargo_cancelled' - EMBARGO_COMPLETED = 'embargo_completed' - EMBARGO_INITIATED = 'embargo_initiated' - EMBARGO_TERMINATED = 'embargo_terminated' - - RETRACTION_APPROVED = 'retraction_approved' - RETRACTION_CANCELLED = 'retraction_cancelled' - RETRACTION_INITIATED = 'retraction_initiated' - - REGISTRATION_APPROVAL_CANCELLED = 'registration_cancelled' - REGISTRATION_APPROVAL_INITIATED = 'registration_initiated' - REGISTRATION_APPROVAL_APPROVED = 'registration_approved' - PREREG_REGISTRATION_INITIATED = 'prereg_registration_initiated' - - AFFILIATED_INSTITUTION_ADDED = 'affiliated_institution_added' - AFFILIATED_INSTITUTION_REMOVED = 'affiliated_institution_removed' - - PREPRINT_INITIATED = 'preprint_initiated' - PREPRINT_FILE_UPDATED = 'preprint_file_updated' - PREPRINT_LICENSE_UPDATED = 'preprint_license_updated' - - actions = [CHECKED_IN, CHECKED_OUT, FILE_TAG_REMOVED, FILE_TAG_ADDED, CREATED_FROM, PROJECT_CREATED, PROJECT_REGISTERED, PROJECT_DELETED, NODE_CREATED, NODE_FORKED, NODE_REMOVED, POINTER_CREATED, POINTER_FORKED, POINTER_REMOVED, WIKI_UPDATED, WIKI_DELETED, WIKI_RENAMED, MADE_WIKI_PUBLIC, MADE_WIKI_PRIVATE, CONTRIB_ADDED, CONTRIB_REMOVED, CONTRIB_REORDERED, PERMISSIONS_UPDATED, MADE_PRIVATE, MADE_PUBLIC, TAG_ADDED, TAG_REMOVED, EDITED_TITLE, EDITED_DESCRIPTION, UPDATED_FIELDS, FILE_MOVED, FILE_COPIED, FOLDER_CREATED, FILE_ADDED, FILE_UPDATED, FILE_REMOVED, FILE_RESTORED, ADDON_ADDED, ADDON_REMOVED, COMMENT_ADDED, COMMENT_REMOVED, COMMENT_UPDATED, MADE_CONTRIBUTOR_VISIBLE, MADE_CONTRIBUTOR_INVISIBLE, EXTERNAL_IDS_ADDED, EMBARGO_APPROVED, EMBARGO_CANCELLED, EMBARGO_COMPLETED, EMBARGO_INITIATED, RETRACTION_APPROVED, RETRACTION_CANCELLED, RETRACTION_INITIATED, REGISTRATION_APPROVAL_CANCELLED, REGISTRATION_APPROVAL_INITIATED, REGISTRATION_APPROVAL_APPROVED, PREREG_REGISTRATION_INITIATED, CITATION_ADDED, CITATION_EDITED, CITATION_REMOVED, AFFILIATED_INSTITUTION_ADDED, AFFILIATED_INSTITUTION_REMOVED, 
PREPRINT_INITIATED, PREPRINT_FILE_UPDATED] - - def __repr__(self): - return ('').format(self=self) - - # For Django compatibility - @property - def pk(self): - return self._id - - def clone_node_log(self, node_id): - """ - When a node is forked or registered, all logs on the node need to be cloned for the fork or registration. - :param node_id: - :return: cloned log - """ - original_log = self.load(self._id) - node = Node.find(Q('_id', 'eq', node_id))[0] - log_clone = original_log.clone() - log_clone.node = node - log_clone.original_node = original_log.original_node - log_clone.user = original_log.user - log_clone.save() - return log_clone - - @property - def tz_date(self): - '''Return the timezone-aware date. - ''' - # Date should always be defined, but a few logs in production are - # missing dates; return None and log error if date missing - if self.date: - return self.date.replace(tzinfo=pytz.UTC) - logger.error('Date missing on NodeLog {}'.format(self._primary_key)) - - @property - def formatted_date(self): - '''Return the timezone-aware, ISO-formatted string representation of - this log's date. 
- ''' - if self.tz_date: - return self.tz_date.isoformat() - - def can_view(self, node, auth): - return node.can_view(auth) - - def _render_log_contributor(self, contributor, anonymous=False): - user = User.load(contributor) - if not user: - # Handle legacy non-registered users, which were - # represented as a dict - if isinstance(contributor, dict): - if 'nr_name' in contributor: - return { - 'fullname': contributor['nr_name'], - 'registered': False, - } - return None - if self.node: - fullname = user.display_full_name(node=self.node) - else: - fullname = user.fullname - return { - 'id': privacy_info_handle(user._primary_key, anonymous), - 'fullname': privacy_info_handle(fullname, anonymous, name=True), - 'registered': user.is_registered, - } - - @property - def absolute_api_v2_url(self): - path = '/logs/{}/'.format(self._id) - return api_v2_url(path) - - def get_absolute_url(self): - return self.absolute_api_v2_url - - @property - def absolute_url(self): - return self.absolute_api_v2_url - - -class Tag(StoredObject): - - _id = fields.StringField(primary=True, validate=MaxLengthValidator(128)) - lower = fields.StringField(index=True, validate=MaxLengthValidator(128)) - - def __init__(self, _id, lower=None, **kwargs): - super(Tag, self).__init__(_id=_id, lower=lower or _id.lower(), **kwargs) - - def __repr__(self): - return ''.format(self=self) - - @property - def url(self): - return '/search/?tags={}'.format(self._id) - - -class Pointer(StoredObject): - """A link to a Node. The Pointer delegates all but a few methods to its - contained Node. Forking and registration are overridden such that the - link is cloned, but its contained Node is not. - """ - #: Whether this is a pointer or not - primary = False - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - # Previous 5-character ID. Unused in application code. - # These were migrated to ObjectIds to prevent clashes with GUIDs. 
- _legacy_id = fields.StringField() - node = fields.ForeignField('node') - - _meta = {'optimistic': True} - - def _clone(self): - if self.node: - clone = self.clone() - clone.node = self.node - clone.save() - return clone - - def fork_node(self, *args, **kwargs): - return self._clone() - - def register_node(self, *args, **kwargs): - return self._clone() - - def use_as_template(self, *args, **kwargs): - return self._clone() - - def resolve(self): - return self.node - - def __getattr__(self, item): - """Delegate attribute access to the node being pointed to.""" - # Prevent backref lookups from being overriden by proxied node - try: - return super(Pointer, self).__getattr__(item) - except AttributeError: - pass - if self.node: - return getattr(self.node, item) - raise AttributeError( - 'Pointer object has no attribute {0}'.format( - item - ) - ) - def get_pointer_parent(pointer): """Given a `Pointer` object, return its parent node. @@ -736,9 +115,10 @@ def validate_title(value): def validate_user(value): + OSFUser = apps.get_model('osf.OSFUser') if value != {}: user_id = value.iterkeys().next() - if User.find(Q('_id', 'eq', user_id)).count() != 1: + if OSFUser.find(Q('_id', 'eq', user_id)).count() != 1: raise ValidationValueError('User does not exist.') return True @@ -754,3635 +134,3 @@ def validate_doi(value): if value and not re.match(r'\b(10\.\d{4,}(?:\.\d+)*/\S+(?:(?!["&\'<>])\S))\b', value): raise ValidationValueError('"{}" is not a valid DOI'.format(value)) return True - - -class Node(GuidStoredObject, AddonModelMixin, IdentifierMixin, Commentable, SpamMixin): - - #: Whether this is a pointer or not - primary = True - - __indices__ = [ - { - 'unique': False, - 'key_or_list': [ - ('date_modified', pymongo.DESCENDING), - ] - }, - # Dollar sign indexes don't actually do anything - # This index has been moved to scripts/indices.py#L30 - # { - # 'unique': False, - # 'key_or_list': [ - # ('tags.$', pymongo.ASCENDING), - # ('is_public', pymongo.ASCENDING), - # 
('is_deleted', pymongo.ASCENDING), - # ('institution_id', pymongo.ASCENDING), - # ] - # }, - { - 'unique': False, - 'key_or_list': [ - ('is_deleted', pymongo.ASCENDING), - ('is_collection', pymongo.ASCENDING), - ('is_public', pymongo.ASCENDING), - ('institution_id', pymongo.ASCENDING), - ('is_registration', pymongo.ASCENDING), - ('date_modified', pymongo.ASCENDING), - ] - }, - { - 'unique': False, - 'key_or_list': [ - ('institution_id', pymongo.ASCENDING), - ('institution_domains', pymongo.ASCENDING), - ] - }, - { - 'unique': False, - 'key_or_list': [ - ('institution_id', pymongo.ASCENDING), - ('institution_email_domains', pymongo.ASCENDING), - ] - }, - { - 'unique': False, - 'key_or_list': [ - ('institution_id', pymongo.ASCENDING), - ('registration_approval', pymongo.ASCENDING), - ] - }, - ] - - # Node fields that trigger an update to Solr on save - SEARCH_UPDATE_FIELDS = { - 'title', - 'category', - 'description', - 'visible_contributor_ids', - 'tags', - 'is_fork', - 'is_registration', - 'retraction', - 'embargo', - 'is_public', - 'is_deleted', - 'wiki_pages_current', - 'is_retracted', - 'node_license', - '_affiliated_institutions', - 'preprint_file', - } - - # Node fields that trigger a check to the spam filter on save - SPAM_CHECK_FIELDS = { - 'title', - 'description', - 'wiki_pages_current', - } - - # Fields that are writable by Node.update - WRITABLE_WHITELIST = [ - 'title', - 'description', - 'category', - 'is_public', - 'node_license', - ] - - # Named constants - PRIVATE = 'private' - PUBLIC = 'public' - - _id = fields.StringField(primary=True) - - date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow, index=True) - date_modified = fields.DateTimeField() - - # Privacy - is_public = fields.BooleanField(default=False, index=True) - - # User mappings - permissions = fields.DictionaryField() - visible_contributor_ids = fields.StringField(list=True) - - # Project Organization - is_bookmark_collection = fields.BooleanField(default=False, 
index=True) - is_collection = fields.BooleanField(default=False, index=True) - - is_deleted = fields.BooleanField(default=False, index=True) - deleted_date = fields.DateTimeField(index=True) - suspended = fields.BooleanField(default=False) - - is_registration = fields.BooleanField(default=False, index=True) - registered_date = fields.DateTimeField(index=True) - registered_user = fields.ForeignField('user') - - # Preprint fields - preprint_file = fields.ForeignField('StoredFileNode') - preprint_article_doi = fields.StringField(validate=validate_doi) - _is_preprint_orphan = fields.BooleanField(default=False) - _has_abandoned_preprint = fields.BooleanField(default=False) - - # A list of all MetaSchemas for which this Node has registered_meta - registered_schema = fields.ForeignField('metaschema', list=True, default=list) - # A set of : pairs, where is a - # flat set of : pairs-- these question ids_above - # map the the ids in the registrations MetaSchema (see registered_schema). - # { - # : { - # 'value': , - # 'comments': [ - # - # ] - # } - registered_meta = fields.DictionaryField() - registration_approval = fields.ForeignField('registrationapproval') - retraction = fields.ForeignField('retraction') - embargo = fields.ForeignField('embargo') - embargo_termination_approval = fields.ForeignField('embargoterminationapproval') - - is_fork = fields.BooleanField(default=False, index=True) - forked_date = fields.DateTimeField(index=True) - - title = fields.StringField(validate=validate_title) - description = fields.StringField() - category = fields.StringField(validate=validate_category, index=True) - - node_license = fields.ForeignField('nodelicenserecord') - - # One of 'public', 'private' - # TODO: Add validator - comment_level = fields.StringField(default='public') - - wiki_pages_current = fields.DictionaryField() - wiki_pages_versions = fields.DictionaryField() - # Dictionary field mapping node wiki page to sharejs private uuid. 
- # {: } - wiki_private_uuids = fields.DictionaryField() - file_guid_to_share_uuids = fields.DictionaryField() - - creator = fields.ForeignField('user', index=True) - contributors = fields.ForeignField('user', list=True) - users_watching_node = fields.ForeignField('user', list=True) - - tags = fields.ForeignField('tag', list=True) - - # Tags for internal use - system_tags = fields.StringField(list=True) - - nodes = fields.AbstractForeignField(list=True, backref='parent') - forked_from = fields.ForeignField('node', index=True) - registered_from = fields.ForeignField('node', index=True) - root = fields.ForeignField('node', index=True) - parent_node = fields.ForeignField('node', index=True) - - # The node (if any) used as a template for this node's creation - template_node = fields.ForeignField('node', index=True) - - keenio_read_key = fields.StringField() - - # Dictionary field mapping user id to a list of nodes in node.nodes which the user has subscriptions for - # {: [, , ...] } - child_node_subscriptions = fields.DictionaryField(default=dict) - - alternative_citations = fields.ForeignField('alternativecitation', list=True) - - _meta = { - 'optimistic': True, - } - - def __init__(self, *args, **kwargs): - - kwargs.pop('logs', []) - - super(Node, self).__init__(*args, **kwargs) - - if kwargs.get('_is_loaded', False): - return - - # Ensure when Node is created with tags through API, tags are added to Tag - tags = kwargs.pop('tags', []) - for tag in tags: - self.add_tag(tag, Auth(self.creator), save=False, log=False) - - if self.creator: - self.contributors.append(self.creator) - self.set_visible(self.creator, visible=True, log=False) - - # Add default creator permissions - for permission in CREATOR_PERMISSIONS: - self.add_permission(self.creator, permission, save=False) - - def __repr__(self): - return ('').format(self=self) - - # For Django compatibility - @property - def pk(self): - return self._id - - # For Comment API compatibility - @property - def 
target_type(self): - """The object "type" used in the OSF v2 API.""" - return 'nodes' - - @property - def root_target_page(self): - """The comment page type associated with Nodes.""" - return Comment.OVERVIEW - - def belongs_to_node(self, node_id): - """Check whether this node matches the specified node.""" - return self._id == node_id - - @property - def logs(self): - """ List of logs associated with this node""" - return NodeLog.find(Q('node', 'eq', self._id)).sort('date') - - @property - def license(self): - node_license = self.node_license - if not node_license and self.parent_node: - return self.parent_node.license - return node_license - - @property - def category_display(self): - """The human-readable representation of this node's category.""" - return settings.NODE_CATEGORY_MAP[self.category] - - # We need the following 2 properties in order to serialize related links in NodeRegistrationSerializer - @property - def registered_user_id(self): - """The ID of the user who registered this node if this is a registration, else None. - """ - if self.registered_user: - return self.registered_user._id - return None - - @property - def registered_from_id(self): - """The ID of the node that was registered, else None. 
- """ - if self.registered_from: - return self.registered_from._id - return None - - @property - def sanction(self): - sanction = self.embargo_termination_approval or self.retraction or self.embargo or self.registration_approval - if sanction: - return sanction - elif self.parent_node: - return self.parent_node.sanction - else: - return None - - @property - def is_pending_registration(self): - if not self.is_registration: - return False - if self.registration_approval is None: - if self.parent_node: - return self.parent_node.is_pending_registration - return False - return self.registration_approval.is_pending_approval - - @property - def is_registration_approved(self): - if self.registration_approval is None: - if self.parent_node: - return self.parent_node.is_registration_approved - return False - return self.registration_approval.is_approved - - @property - def is_retracted(self): - if self.retraction is None: - if self.parent_node: - return self.parent_node.is_retracted - return False - return self.retraction.is_approved - - @property - def is_pending_retraction(self): - if self.retraction is None: - if self.parent_node: - return self.parent_node.is_pending_retraction - return False - return self.retraction.is_pending_approval - - @property - def embargo_end_date(self): - if self.embargo is None: - if self.parent_node: - return self.parent_node.embargo_end_date - return False - return self.embargo.end_date - - @property - def is_pending_embargo(self): - if self.embargo is None: - if self.parent_node: - return self.parent_node.is_pending_embargo - return False - return self.embargo.is_pending_approval - - @property - def is_pending_embargo_for_existing_registration(self): - """ Returns True if Node has an Embargo pending approval for an - existing registrations. This is used specifically to ensure - registrations pre-dating the Embargo feature do not get deleted if - their respective Embargo request is rejected. 
- """ - if self.embargo is None: - if self.parent_node: - return self.parent_node.is_pending_embargo_for_existing_registration - return False - return self.embargo.pending_registration - - @property - def is_embargoed(self): - """A Node is embargoed if: - - it has an associated Embargo record - - that record has been approved - - the node is not public (embargo not yet lifted) - """ - if self.embargo is None: - if self.parent_node: - return self.parent_node.is_embargoed - return self.embargo and self.embargo.is_approved and not self.is_public - - @property - def private_links(self): - # TODO: Consumer code assumes this is a list. Hopefully there aren't many links? - return list(PrivateLink.find(Q('nodes', 'eq', self._id))) - - @property - def private_links_active(self): - return [x for x in self.private_links if not x.is_deleted] - - @property - def private_link_keys_active(self): - return [x.key for x in self.private_links if not x.is_deleted] - - @property - def private_link_keys_deleted(self): - return [x.key for x in self.private_links if x.is_deleted] - - def path_above(self, auth): - parents = self.parents - return '/' + '/'.join([p.title if p.can_view(auth) else '-- private project --' for p in reversed(parents)]) - - @property - def ids_above(self): - parents = self.parents - return {p._id for p in parents} - - @property - def nodes_active(self): - return [x for x in self.nodes if not x.is_deleted] - - @property - def draft_registrations_active(self): - drafts = DraftRegistration.find( - Q('branched_from', 'eq', self) - ) - for draft in drafts: - if not draft.registered_node or draft.registered_node.is_deleted: - yield draft - - @property - def has_active_draft_registrations(self): - try: - next(self.draft_registrations_active) - except StopIteration: - return False - else: - return True - - @property - def is_preprint(self): - if not self.preprint_file or not self.is_public: - return False - if self.preprint_file.node == self: - self._is_preprint_orphan = 
False - return True - else: - self._is_preprint_orphan = True - return False - - @property - def is_preprint_orphan(self): - if (not self.is_preprint) and self._is_preprint_orphan: - return True - return False - - @property - def preprints(self): - from website.preprints.model import PreprintService - if not self.is_preprint: - return [] - return PreprintService.find(Q('node', 'eq', self)) - - @property - def preprint_url(self): - if self.is_preprint: - try: - return self.preprints[0].url - except IndexError: - pass - - def can_edit(self, auth=None, user=None): - """Return if a user is authorized to edit this node. - Must specify one of (`auth`, `user`). - - :param Auth auth: Auth object to check - :param User user: User object to check - :returns: Whether user has permission to edit this node. - """ - if not auth and not user: - raise ValueError('Must pass either `auth` or `user`') - if auth and user: - raise ValueError('Cannot pass both `auth` and `user`') - user = user or auth.user - if auth: - is_api_node = auth.api_node == self - else: - is_api_node = False - return ( - (user and self.has_permission(user, 'write')) - or is_api_node - ) - - def active_contributors(self, include=lambda n: True): - for contrib in self.contributors: - if contrib.is_active and include(contrib): - yield contrib - - def is_admin_parent(self, user): - if self.has_permission(user, 'admin', check_parent=False): - return True - if self.parent_node: - return self.parent_node.is_admin_parent(user) - return False - - def can_view(self, auth): - if auth and getattr(auth.private_link, 'anonymous', False): - return self._id in auth.private_link.nodes - - if not auth and not self.is_public: - return False - - return ( - self.is_public or - (auth.user and self.has_permission(auth.user, 'read')) or - auth.private_key in self.private_link_keys_active or - self.is_admin_parent(auth.user) - ) - - def is_derived_from(self, other, attr): - derived_from = getattr(self, attr) - while True: - if 
derived_from is None: - return False - if derived_from == other: - return True - derived_from = getattr(derived_from, attr) - - def is_fork_of(self, other): - return self.is_derived_from(other, 'forked_from') - - def is_registration_of(self, other): - return self.is_derived_from(other, 'registered_from') - - @property - def forks(self): - """List of forks of this node""" - return Node.find(Q('forked_from', 'eq', self._id) & - Q('is_deleted', 'eq', False) - & Q('is_registration', 'ne', True)) - - def add_permission(self, user, permission, save=False): - """Grant permission to a user. - - :param User user: User to grant permission to - :param str permission: Permission to grant - :param bool save: Save changes - :raises: ValueError if user already has permission - """ - if user._id not in self.permissions: - self.permissions[user._id] = [permission] - else: - if permission in self.permissions[user._id]: - raise ValueError('User already has permission {0}'.format(permission)) - self.permissions[user._id].append(permission) - if save: - self.save() - - def remove_permission(self, user, permission, save=False): - """Revoke permission from a user. - - :param User user: User to revoke permission from - :param str permission: Permission to revoke - :param bool save: Save changes - :raises: ValueError if user does not have permission - """ - try: - self.permissions[user._id].remove(permission) - except (KeyError, ValueError): - raise ValueError('User does not have permission {0}'.format(permission)) - if save: - self.save() - - def clear_permission(self, user, save=False): - """Clear all permissions for a user. 
- - :param User user: User to revoke permission from - :param bool save: Save changes - :raises: ValueError if user not in permissions - """ - try: - self.permissions.pop(user._id) - except KeyError: - raise ValueError( - 'User {0} not in permissions list for node {1}'.format( - user._id, self._id, - ) - ) - if save: - self.save() - - def set_permissions(self, user, permissions, validate=True, save=False): - # Ensure that user's permissions cannot be lowered if they are the only admin - if validate and reduce_permissions(self.permissions[user._id]) == ADMIN and reduce_permissions(permissions) != ADMIN: - reduced_permissions = [ - reduce_permissions(perms) for user_id, perms in self.permissions.iteritems() - if user_id != user._id - ] - if ADMIN not in reduced_permissions: - raise NodeStateError('Must have at least one registered admin contributor') - self.permissions[user._id] = permissions - if save: - self.save() - - def has_permission(self, user, permission, check_parent=True): - """Check whether user has permission. - - :param User user: User to test - :param str permission: Required permission - :returns: User has required permission - """ - if user is None: - return False - if permission in self.permissions.get(user._id, []): - return True - if permission == 'read' and check_parent: - return self.is_admin_parent(user) - return False - - def has_permission_on_children(self, user, permission): - """Checks if the given user has a given permission on any child nodes - that are not registrations or deleted - """ - if self.has_permission(user, permission): - return True - - for node in self.nodes: - if not node.primary or node.is_deleted: - continue - - if node.has_permission_on_children(user, permission): - return True - - return False - - def find_readable_antecedent(self, auth): - """ Returns first antecendant node readable by . 
- """ - next_parent = self.parent_node - while next_parent: - if next_parent.can_view(auth): - return next_parent - next_parent = next_parent.parent_node - - def find_readable_descendants(self, auth): - """ Returns a generator of first descendant node(s) readable by - in each descendant branch. - """ - new_branches = [] - for node in self.nodes: - if not node.primary or node.is_deleted: - continue - - if node.can_view(auth): - yield node - else: - new_branches.append(node) - - for bnode in new_branches: - for node in bnode.find_readable_descendants(auth): - yield node - - def has_addon_on_children(self, addon): - """Checks if a given node has a specific addon on child nodes - that are not registrations or deleted - """ - if self.has_addon(addon): - return True - - for node in self.nodes: - if not node.primary or node.is_deleted: - continue - - if node.has_addon_on_children(addon): - return True - - return False - - def get_permissions(self, user): - """Get list of permissions for user. - - :param User user: User to check - :returns: List of permissions - :raises: ValueError if user not found in permissions - """ - return self.permissions.get(user._id, []) - - def adjust_permissions(self): - for key in self.permissions.keys(): - if key not in self.contributors: - self.permissions.pop(key) - - @property - def visible_contributors(self): - return [ - User.load(_id) - for _id in self.visible_contributor_ids - ] - - @property - def parents(self): - if self.parent_node: - return [self.parent_node] + self.parent_node.parents - return [] - - @property - def admin_contributor_ids(self, contributors=None): - contributor_ids = self.contributors._to_primary_keys() - admin_ids = set() - for parent in self.parents: - admins = [ - user for user, perms in parent.permissions.iteritems() - if 'admin' in perms - ] - admin_ids.update(set(admins).difference(contributor_ids)) - return admin_ids - - @property - def admin_contributors(self): - return sorted( - [User.load(_id) for _id in 
self.admin_contributor_ids], - key=lambda user: user.family_name, - ) - - def get_visible(self, user): - if not self.is_contributor(user): - raise ValueError(u'User {0} not in contributors'.format(user)) - return user._id in self.visible_contributor_ids - - def update_visible_ids(self, save=False): - """Update the order of `visible_contributor_ids`. Updating on making - a contributor visible is more efficient than recomputing order on - accessing `visible_contributors`. - """ - self.visible_contributor_ids = [ - contributor._id - for contributor in self.contributors - if contributor._id in self.visible_contributor_ids - ] - if save: - self.save() - - def set_visible(self, user, visible, log=True, auth=None, save=False): - if not self.is_contributor(user): - raise ValueError(u'User {0} not in contributors'.format(user)) - if visible and user._id not in self.visible_contributor_ids: - self.visible_contributor_ids.append(user._id) - self.update_visible_ids(save=False) - elif not visible and user._id in self.visible_contributor_ids: - if len(self.visible_contributor_ids) == 1: - raise ValueError('Must have at least one visible contributor') - self.visible_contributor_ids.remove(user._id) - else: - return - message = ( - NodeLog.MADE_CONTRIBUTOR_VISIBLE - if visible - else NodeLog.MADE_CONTRIBUTOR_INVISIBLE - ) - if log: - self.add_log( - message, - params={ - 'parent': self.parent_id, - 'node': self._id, - 'contributors': [user._id], - }, - auth=auth, - save=False, - ) - if save: - self.save() - - def can_comment(self, auth): - if self.comment_level == 'public': - return auth.logged_in and ( - self.is_public or - (auth.user and self.has_permission(auth.user, 'read')) - ) - return self.is_contributor(auth.user) - - def set_node_license(self, license_detail, auth, save=False): - - license_record, license_changed = set_license(self, license_detail, auth) - - if license_changed: - self.add_log( - action=NodeLog.CHANGED_LICENSE, - params={ - 'parent_node': self.parent_id, - 
'node': self._primary_key, - 'new_license': license_record.node_license.name - }, - auth=auth, - save=False, - ) - - if save: - self.save() - - def generate_keenio_read_key(self): - return scoped_keys.encrypt(settings.KEEN['public']['master_key'], options={ - 'filters': [{ - 'property_name': 'node.id', - 'operator': 'eq', - 'property_value': str(self._id) - }], - 'allowed_operations': ['read'] - }) - - def subscribe_user_to_notifications(self, user): - """ Update the notification settings for the creator or contributors - - :param user: User to subscribe to notifications - """ - from website.notifications.utils import to_subscription_key - from website.notifications.utils import get_global_notification_type - from website.notifications.model import NotificationSubscription - - events = ['file_updated', 'comments', 'mentions'] - notification_type = 'email_transactional' - target_id = self._id - - for event in events: - event_id = to_subscription_key(target_id, event) - global_event_id = to_subscription_key(user._id, 'global_' + event) - global_subscription = NotificationSubscription.load(global_event_id) - - subscription = NotificationSubscription.load(event_id) - if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=self, event_name=event) - if global_subscription: - global_notification_type = get_global_notification_type(global_subscription, user) - subscription.add_user_to_subscription(user, global_notification_type) - else: - subscription.add_user_to_subscription(user, notification_type) - subscription.save() - - def update(self, fields, auth=None, save=True): - """Update the node with the given fields. - - :param dict fields: Dictionary of field_name:value pairs. - :param Auth auth: Auth object for the user making the update. - :param bool save: Whether to save after updating the object. 
- """ - if not fields: # Bail out early if there are no fields to update - return False - values = {} - for key, value in fields.iteritems(): - if key not in self.WRITABLE_WHITELIST: - continue - if self.is_registration and key != 'is_public': - raise NodeUpdateError(reason='Registered content cannot be updated', key=key) - # Title and description have special methods for logging purposes - if key == 'title': - if not self.is_bookmark_collection: - self.set_title(title=value, auth=auth, save=False) - else: - raise NodeUpdateError(reason='Bookmark collections cannot be renamed.', key=key) - elif key == 'description': - self.set_description(description=value, auth=auth, save=False) - elif key == 'is_public': - self.set_privacy( - Node.PUBLIC if value else Node.PRIVATE, - auth=auth, - log=True, - save=False - ) - elif key == 'node_license': - self.set_node_license( - { - 'id': value.get('id'), - 'year': value.get('year'), - 'copyrightHolders': value.get('copyrightHolders') or value.get('copyright_holders', []) - }, - auth, - save=save - ) - else: - with warnings.catch_warnings(): - try: - # This is in place because historically projects and components - # live on different ElasticSearch indexes, and at the time of Node.save - # there is no reliable way to check what the old Node.category - # value was. 
When the cateogory changes it is possible to have duplicate/dead - # search entries, so always delete the ES doc on categoryt change - # TODO: consolidate Node indexes into a single index, refactor search - if key == 'category': - self.delete_search_entry() - ############### - old_value = getattr(self, key) - if old_value != value: - values[key] = { - 'old': old_value, - 'new': value, - } - setattr(self, key, value) - except AttributeError: - raise NodeUpdateError(reason="Invalid value for attribute '{0}'".format(key), key=key) - except warnings.Warning: - raise NodeUpdateError(reason="Attribute '{0}' doesn't exist on the Node class".format(key), key=key) - if save: - updated = self.save() - else: - updated = [] - for key in values: - values[key]['new'] = getattr(self, key) - if values: - self.add_log( - NodeLog.UPDATED_FIELDS, - params={ - 'node': self._id, - 'updated_fields': { - key: { - 'old': values[key]['old'], - 'new': values[key]['new'] - } - for key in values - } - }, - auth=auth) - return updated - - def save(self, *args, **kwargs): - self.adjust_permissions() - - first_save = not self._is_loaded - - if first_save and self.is_bookmark_collection: - existing_bookmark_collections = Node.find( - Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', self.creator._id) & Q('is_deleted', 'eq', False) - ) - if existing_bookmark_collections.count() > 0: - raise NodeStateError('Only one bookmark collection allowed per user.') - - # Bookmark collections are always named 'Bookmarks' - if self.is_bookmark_collection and self.title != 'Bookmarks': - self.title = 'Bookmarks' - - is_original = not self.is_registration and not self.is_fork - if 'suppress_log' in kwargs.keys(): - suppress_log = kwargs['suppress_log'] - del kwargs['suppress_log'] - else: - suppress_log = False - - self.root = self._root._id - self.parent_node = self._parent_node - - # If you're saving a property, do it above this super call - saved_fields = super(Node, self).save(*args, 
**kwargs) - - if first_save and is_original and not suppress_log: - # TODO: This logic also exists in self.use_as_template() - for addon in settings.ADDONS_AVAILABLE: - if 'node' in addon.added_default: - self.add_addon(addon.short_name, auth=None, log=False) - - # Define log fields for non-component project - log_action = NodeLog.PROJECT_CREATED - log_params = { - 'node': self._primary_key, - } - - if getattr(self, 'parent', None): - # Append log to parent - self.parent.nodes.append(self) - self.parent.save() - log_params.update({'parent_node': self.parent._primary_key}) - - # Add log with appropriate fields - self.add_log( - log_action, - params=log_params, - auth=Auth(user=self.creator), - log_date=self.date_created, - save=True, - ) - - project_signals.project_created.send(self) - - if saved_fields: - self.on_update(first_save, saved_fields) - - if 'node_license' in saved_fields: - children = [c for c in self.get_descendants_recursive( - include=lambda n: n.node_license is None - ) if c.is_public and not c.is_deleted] - # this returns generator, that would get unspooled anyways - while len(children): - batch = children[:99] - Node.bulk_update_search(batch) - children = children[99:] - - # Return expected value for StoredObject::save - return saved_fields - - ###################################### - # Methods that return a new instance # - ###################################### - - def use_as_template(self, auth, changes=None, top_level=True): - """Create a new project, using an existing project as a template. - - :param auth: The user to be assigned as creator - :param changes: A dictionary of changes, keyed by node id, which - override the attributes of the template project or its - children. - :return: The `Node` instance created. - """ - changes = changes or dict() - - # build the dict of attributes to change for the new node - try: - attributes = changes[self._id] - # TODO: explicitly define attributes which may be changed. 
- except (AttributeError, KeyError): - attributes = dict() - - new = self.clone() - - # clear permissions, which are not cleared by the clone method - new.permissions = {} - new.visible_contributor_ids = [] - - # Clear quasi-foreign fields - new.wiki_pages_current = {} - new.wiki_pages_versions = {} - new.wiki_private_uuids = {} - new.file_guid_to_share_uuids = {} - - # set attributes which may be overridden by `changes` - new.is_public = False - new.description = None - - # apply `changes` - for attr, val in attributes.iteritems(): - setattr(new, attr, val) - - # set attributes which may NOT be overridden by `changes` - new.creator = auth.user - new.template_node = self - new.add_contributor(contributor=auth.user, permissions=CREATOR_PERMISSIONS, log=False, save=False) - new.is_fork = False - new.is_registration = False - new.node_license = self.license.copy() if self.license else None - - # If that title hasn't been changed, apply the default prefix (once) - if (new.title == self.title - and top_level - and language.TEMPLATED_FROM_PREFIX not in new.title): - new.title = ''.join((language.TEMPLATED_FROM_PREFIX, new.title, )) - - # Slight hack - date_created is a read-only field. 
- new._fields['date_created'].__set__( - new, - timezone.now(), - safe=True - ) - - new.save(suppress_log=True) - - # Log the creation - new.add_log( - NodeLog.CREATED_FROM, - params={ - 'node': new._primary_key, - 'template_node': { - 'id': self._primary_key, - 'url': self.url, - 'title': self.title, - }, - }, - auth=auth, - log_date=new.date_created, - save=False, - ) - - # add mandatory addons - # TODO: This logic also exists in self.save() - for addon in settings.ADDONS_AVAILABLE: - if 'node' in addon.added_default: - new.add_addon(addon.short_name, auth=None, log=False) - - # deal with the children of the node, if any - new.nodes = [ - x.use_as_template(auth, changes, top_level=False) - for x in self.nodes - if x.can_view(auth) and not x.is_deleted - ] - - new.save() - return new - - ############ - # Pointers # - ############ - - def add_pointer(self, node, auth, save=True): - """Add a pointer to a node. - - :param Node node: Node to add - :param Auth auth: Consolidated authorization - :param bool save: Save changes - :return: Created pointer - """ - # Fail if node already in nodes / pointers. Note: cast node and node - # to primary keys to test for conflicts with both nodes and pointers - # contained in `self.nodes`. - if node._id in self.node_ids: - raise ValueError( - 'Pointer to node {0} already in list'.format(node._id) - ) - - if self.is_registration: - raise NodeStateError('Cannot add a pointer to a registration') - - # If a folder, prevent more than one pointer to that folder. This will prevent infinite loops on the project organizer. - already_pointed = node.pointed - if node.is_collection and len(already_pointed) > 0: - raise ValueError( - 'Pointer to folder {0} already exists. 
Only one pointer to any given folder allowed'.format(node._id) - ) - if node.is_bookmark_collection: - raise ValueError( - 'Pointer to bookmark collection ({0}) not allowed.'.format(node._id) - ) - - # Append pointer - pointer = Pointer(node=node) - pointer.save() - self.nodes.append(pointer) - - # Add log - self.add_log( - action=NodeLog.POINTER_CREATED, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'pointer': { - 'id': pointer.node._id, - 'url': pointer.node.url, - 'title': pointer.node.title, - 'category': pointer.node.category, - }, - }, - auth=auth, - save=False, - ) - - # Optionally save changes - if save: - self.save() - - return pointer - - def rm_pointer(self, pointer, auth): - """Remove a pointer. - - :param Pointer pointer: Pointer to remove - :param Auth auth: Consolidated authorization - """ - if pointer not in self.nodes: - raise ValueError('Node link does not belong to the requested node.') - - # Remove `Pointer` object; will also remove self from `nodes` list of - # parent node - Pointer.remove_one(pointer) - - # Add log - self.add_log( - action=NodeLog.POINTER_REMOVED, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'pointer': { - 'id': pointer.node._id, - 'url': pointer.node.url, - 'title': pointer.node.title, - 'category': pointer.node.category, - }, - }, - auth=auth, - save=False, - ) - - @property - def node_ids(self): - return [ - node._id if node.primary else node.node._id - for node in self.nodes - ] - - @property - def nodes_primary(self): - return [ - node - for node in self.nodes - if node.primary - ] - - def node_and_primary_descendants(self): - """Return an iterator for a node and all of its primary (non-pointer) descendants. 
- - :param node Node: target Node - """ - return itertools.chain([self], self.get_descendants_recursive(lambda n: n.primary)) - - @property - def depth(self): - return len(self.parents) - - def next_descendants(self, auth, condition=lambda auth, node: True): - """ - Recursively find the first set of descedants under a given node that meet a given condition - - returns a list of [(node, [children]), ...] - """ - ret = [] - for node in self.nodes: - if condition(auth, node): - # base case - ret.append((node, [])) - else: - ret.append((node, node.next_descendants(auth, condition))) - ret = [item for item in ret if item[1] or condition(auth, item[0])] # prune empty branches - return ret - - def get_descendants_recursive(self, include=lambda n: True): - for node in self.nodes: - if include(node): - yield node - if node.primary: - for descendant in node.get_descendants_recursive(include): - if include(descendant): - yield descendant - - def get_aggregate_logs_query(self, auth): - ids = [self._id] + [n._id - for n in self.get_descendants_recursive() - if n.can_view(auth)] - query = Q('node', 'in', ids) & Q('should_hide', 'ne', True) - return query - - def get_aggregate_logs_queryset(self, auth): - query = self.get_aggregate_logs_query(auth) - return NodeLog.find(query).sort('-date') - - @property - def nodes_pointer(self): - return [ - node - for node in self.nodes - if not node.primary - ] - - @property - def has_pointers_recursive(self): - """Recursively checks whether the current node or any of its nodes - contains a pointer. - """ - if self.nodes_pointer.exists(): - return True - for node in self.nodes_primary: - if node.has_pointers_recursive: - return True - return False - - @property - def pointed(self): - return Pointer.find(Q('node', 'eq', self._id)) - - def pointing_at(self, pointed_node_id): - """This node is pointed at another node. - - :param Node pointed_node_id: The node id of the node being pointed at. 
- :return: pointer_id - """ - for pointer in self.nodes_pointer: - node_id = pointer.node._id - if node_id == pointed_node_id: - return pointer._id - return None - - def get_points(self, folders=False, deleted=False, resolve=True): - ret = [] - for each in self.pointed: - pointer_node = get_pointer_parent(each) - if not folders and pointer_node.is_collection: - continue - if not deleted and pointer_node.is_deleted: - continue - if resolve: - ret.append(pointer_node) - else: - ret.append(each) - return ret - - def resolve(self): - return self - - def fork_pointer(self, pointer, auth, save=True): - """Replace a pointer with a fork. If the pointer points to a project, - fork the project and replace the pointer with a new pointer pointing - to the fork. If the pointer points to a component, fork the component - and add it to the current node. - - :param Pointer pointer: - :param Auth auth: - :param bool save: - :return: Forked node - """ - # Fail if pointer not contained in `nodes` - try: - index = self.nodes.index(pointer) - except ValueError: - raise ValueError('Pointer {0} not in list'.format(pointer._id)) - - # Get pointed node - node = pointer.node - - # Fork into current node and replace pointer with forked component - forked = node.fork_node(auth) - if forked is None: - raise ValueError('Could not fork node') - - self.nodes[index] = forked - - # Add log - self.add_log( - NodeLog.POINTER_FORKED, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'pointer': { - 'id': pointer.node._id, - 'url': pointer.node.url, - 'title': pointer.node.title, - 'category': pointer.node.category, - }, - }, - auth=auth, - save=False, - ) - - # Optionally save changes - if save: - self.save() - # Garbage-collect pointer. Note: Must save current node before - # removing pointer, else remove will fail when trying to remove - # backref from self to pointer. 
- Pointer.remove_one(pointer) - - # Return forked content - return forked - - def get_recent_logs(self, n=10): - """Return a list of the n most recent logs, in reverse chronological - order. - - :param int n: Number of logs to retrieve - """ - return self.logs.sort('-date')[:n] - - def set_title(self, title, auth, save=False): - """Set the title of this Node and log it. - - :param str title: The new title. - :param auth: All the auth information including user, API key. - """ - #Called so validation does not have to wait until save. - validate_title(title) - - original_title = self.title - new_title = sanitize.strip_html(title) - # Title hasn't changed after sanitzation, bail out - if original_title == new_title: - return False - self.title = new_title - self.add_log( - action=NodeLog.EDITED_TITLE, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'title_new': self.title, - 'title_original': original_title, - }, - auth=auth, - save=False, - ) - if save: - self.save() - return None - - def set_description(self, description, auth, save=False): - """Set the description and log the event. - - :param str description: The new description - :param auth: All the auth informtion including user, API key. - :param bool save: Save self after updating. 
- """ - original = self.description - new_description = sanitize.strip_html(description) - if original == new_description: - return False - self.description = new_description - self.add_log( - action=NodeLog.EDITED_DESCRIPTION, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'description_new': self.description, - 'description_original': original - }, - auth=auth, - save=False, - ) - if save: - self.save() - return None - - def on_update(self, first_save, saved_fields): - if settings.RUNNING_MIGRATION: # no-no during migration - return - request, user_id = get_request_and_user_id() - request_headers = {} - if not isinstance(request, DummyRequest): - request_headers = { - k: v - for k, v in get_headers_from_request(request).items() - if isinstance(v, basestring) - } - enqueue_task(node_tasks.on_node_updated.s(self._id, user_id, first_save, saved_fields, request_headers)) - - if self.preprint_file and bool(self.SEARCH_UPDATE_FIELDS.intersection(saved_fields)): - # avoid circular imports - from website.preprints.tasks import on_preprint_updated - from website.preprints.model import PreprintService - # .preprints wouldn't return a single deleted preprint - for preprint in PreprintService.find(Q('node', 'eq', self)): - enqueue_task(on_preprint_updated.s(preprint._id)) - - user = User.load(user_id) - if user and self.check_spam(user, saved_fields, request_headers): - # Specifically call the super class save method to avoid recursion into model save method. 
- super(Node, self).save() - - def update_search(self): - from website import search - try: - search.search.update_node(self, bulk=False, async=True) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception() - - @classmethod - def bulk_update_search(cls, nodes, index=None): - from website import search - try: - serialize = functools.partial(search.search.update_node, index=index, bulk=True, async=False) - search.search.bulk_update_nodes(serialize, nodes, index=index) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception() - - def delete_search_entry(self): - from website import search - try: - search.search.delete_node(self) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception() - - def delete_registration_tree(self, save=False): - self.is_deleted = True - for draft_registration in DraftRegistration.find(Q('registered_node', 'eq', self)): - # Allow draft registration to be submitted - if draft_registration.approval: - draft_registration.approval = None - draft_registration.save() - if not getattr(self.embargo, 'for_existing_registration', False): - self.registered_from = None - if save: - self.save() - self.update_search() - for child in self.nodes_primary: - child.delete_registration_tree(save=save) - - def remove_node(self, auth, date=None): - """Marks a node as deleted. - - TODO: Call a hook on addons - Adds a log to the parent node if applicable - - :param auth: an instance of :class:`Auth`. 
- :param date: Date node was removed - :type date: `datetime.datetime` or `None` - """ - # TODO: rename "date" param - it's shadowing a global - - if self.is_bookmark_collection: - raise NodeStateError('Bookmark collections may not be deleted.') - - if not self.can_edit(auth): - raise PermissionsError('{0!r} does not have permission to modify this {1}'.format(auth.user, self.category or 'node')) - - # if this is a collection, remove all the collections that this is pointing at. - if self.is_collection: - for pointed in self.nodes_pointer: - if pointed.node.is_collection: - pointed.node.remove_node(auth=auth) - - if [x for x in self.nodes_primary if not x.is_deleted]: - raise NodeStateError('Any child components must be deleted prior to deleting this project.') - - # After delete callback - for addon in self.get_addons(): - message = addon.after_delete(self, auth.user) - if message: - status.push_status_message(message, kind='info', trust=False) - - log_date = date or timezone.now() - - # Add log to parent - if self.node__parent: - self.node__parent[0].add_log( - NodeLog.NODE_REMOVED, - params={ - 'project': self._primary_key, - }, - auth=auth, - log_date=log_date, - save=True, - ) - else: - self.add_log( - NodeLog.PROJECT_DELETED, - params={ - 'project': self._primary_key, - }, - auth=auth, - log_date=log_date, - save=True, - ) - - self.is_deleted = True - self.deleted_date = date - self.save() - - project_signals.node_deleted.send(self) - - return True - - def fork_node(self, auth, title=None): - """Recursively fork a node. 
- - :param Auth auth: Consolidated authorization - :param str title: Optional text to prepend to forked title - :return: Forked node - """ - PREFIX = 'Fork of ' - user = auth.user - - # Non-contributors can't fork private nodes - if not (self.is_public or self.has_permission(user, 'read')): - raise PermissionsError('{0!r} does not have permission to fork node {1!r}'.format(user, self._id)) - - when = timezone.now() - - original = self.load(self._primary_key) - - if original.is_deleted: - raise NodeStateError('Cannot fork deleted node.') - - # Note: Cloning a node will clone each node wiki page version and add it to - # `registered.wiki_pages_current` and `registered.wiki_pages_versions`. - forked = original.clone() - - forked.tags = self.tags - - # Recursively fork child nodes - for node_contained in original.nodes: - if not node_contained.is_deleted: - forked_node = None - try: # Catch the potential PermissionsError above - forked_node = node_contained.fork_node(auth=auth, title='') - except PermissionsError: - pass # If this exception is thrown omit the node from the result set - if forked_node is not None: - forked.nodes.append(forked_node) - - if title is None: - forked.title = PREFIX + original.title - elif title == '': - forked.title = original.title - else: - forked.title = title - - forked.is_fork = True - forked.is_registration = False - forked.forked_date = when - forked.forked_from = original - forked.creator = user - forked.node_license = original.license.copy() if original.license else None - forked.wiki_private_uuids = {} - - # Forks default to private status - forked.is_public = False - - # Clear permissions before adding users - forked.permissions = {} - forked.visible_contributor_ids = [] - - for citation in self.alternative_citations: - forked.add_citation( - auth=auth, - citation=citation.clone(), - log=False, - save=False - ) - - forked.add_contributor( - contributor=user, - permissions=CREATOR_PERMISSIONS, - log=False, - save=False - ) - - # 
Need this save in order to access _primary_key - forked.save() - - # Need to call this after save for the notifications to be created with the _primary_key - project_signals.contributor_added.send(forked, contributor=user, auth=auth) - - forked.add_log( - action=NodeLog.NODE_FORKED, - params={ - 'parent_node': original.parent_id, - 'node': original._primary_key, - 'registration': forked._primary_key, # TODO: Remove this in favor of 'fork' - 'fork': forked._primary_key, - }, - auth=auth, - log_date=when, - save=False, - ) - - # Clone each log from the original node for this fork. - logs = original.logs - for log in logs: - log.clone_node_log(forked._id) - - forked.reload() - - # After fork callback - for addon in original.get_addons(): - _, message = addon.after_fork(original, forked, user) - if message: - status.push_status_message(message, kind='info', trust=True) - - return forked - - def register_node(self, schema, auth, data, parent=None, recur=False): - """Make a frozen copy of a node. - - :param schema: Schema object - :param auth: All the auth information including user, API key. - :param template: Template name - :param data: Form data - :param parent Node: parent registration of registration to be created - """ - # TODO(lyndsysimon): "template" param is not necessary - use schema.name? - # NOTE: Admins can register child nodes even if they don't have write access them - if not self.can_edit(auth=auth) and not self.is_admin_parent(user=auth.user): - raise PermissionsError( - 'User {} does not have permission ' - 'to register this node'.format(auth.user._id) - ) - if self.is_collection: - raise NodeStateError('Folders may not be registered') - - when = timezone.now() - - original = self.load(self._primary_key) - - # Note: Cloning a node will clone each node wiki page version and add it to - # `registered.wiki_pages_current` and `registered.wiki_pages_versions`. 
- if original.is_deleted: - raise NodeStateError('Cannot register deleted node.') - - registered = original.clone() - - registered.is_registration = True - registered.registered_date = when - registered.registered_user = auth.user - registered.registered_schema.append(schema) - registered.registered_from = original - if not registered.registered_meta: - registered.registered_meta = {} - registered.registered_meta[schema._id] = data - - registered.contributors = self.contributors - registered.forked_from = self.forked_from - registered.creator = self.creator - registered.tags = self.tags - registered._affiliated_institutions = self._affiliated_institutions - registered.alternative_citations = self.alternative_citations - registered.node_license = original.license.copy() if original.license else None - registered.wiki_private_uuids = {} - - registered.save() - - # Clone each log from the original node for this registration. - logs = original.logs - for log in logs: - log.clone_node_log(registered._id) - - registered.is_public = False - for node in registered.get_descendants_recursive(): - node.is_public = False - node.save() - - if parent: - registered._parent_node = parent - - # After register callback - for addon in original.get_addons(): - _, message = addon.after_register(original, registered, auth.user) - if message and not recur: - status.push_status_message(message, kind='info', trust=False) - - for node_relation in original.node_relations.filter(child__is_deleted=False): - node_contained = node_relation.child - # Register child nodes - if not node_relation.is_node_link: - registered_child = node_contained.register_node( # noqa - schema=schema, - auth=auth, - data=data, - parent=registered, - ) - else: - from osf.models.node_relation import NodeRelation - NodeRelation.objects.get_or_create( - is_node_link=True, - parent=registered, - child=node_contained - ) - - registered.save() - - if settings.ENABLE_ARCHIVER: - registered.reload() - 
project_signals.after_create_registration.send(self, dst=registered, user=auth.user) - - return registered - - def remove_tag(self, tag, auth, save=True): - if not tag: - raise InvalidTagError - elif tag not in self.tags: - raise TagNotFoundError - else: - self.tags.remove(tag) - self.add_log( - action=NodeLog.TAG_REMOVED, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'tag': tag, - }, - auth=auth, - save=False, - ) - if save: - self.save() - return True - - def add_tag(self, tag, auth, save=True, log=True): - if not isinstance(tag, Tag): - tag_instance = Tag.load(tag) - if tag_instance is None: - tag_instance = Tag(_id=tag) - else: - tag_instance = tag - # should noop if it's not dirty - tag_instance.save() - - if tag_instance._id not in self.tags: - self.tags.append(tag_instance) - if log: - self.add_log( - action=NodeLog.TAG_ADDED, - params={ - 'parent_node': self.parent_id, - 'node': self._primary_key, - 'tag': tag_instance._id, - }, - auth=auth, - save=False, - ) - if save: - self.save() - - def add_citation(self, auth, save=False, log=True, citation=None, **kwargs): - if not citation: - citation = AlternativeCitation(**kwargs) - citation.save() - self.alternative_citations.append(citation) - citation_dict = {'name': citation.name, 'text': citation.text} - if log: - self.add_log( - action=NodeLog.CITATION_ADDED, - params={ - 'node': self._primary_key, - 'citation': citation_dict - }, - auth=auth, - save=False - ) - if save: - self.save() - return citation - - def edit_citation(self, auth, instance, save=False, log=True, **kwargs): - citation = {'name': instance.name, 'text': instance.text} - new_name = kwargs.get('name', instance.name) - new_text = kwargs.get('text', instance.text) - if new_name != instance.name: - instance.name = new_name - citation['new_name'] = new_name - if new_text != instance.text: - instance.text = new_text - citation['new_text'] = new_text - instance.save() - if log: - self.add_log( - 
action=NodeLog.CITATION_EDITED, - params={ - 'node': self._primary_key, - 'citation': citation - }, - auth=auth, - save=False - ) - if save: - self.save() - return instance - - def remove_citation(self, auth, instance, save=False, log=True): - citation = {'name': instance.name, 'text': instance.text} - self.alternative_citations.remove(instance) - if log: - self.add_log( - action=NodeLog.CITATION_REMOVED, - params={ - 'node': self._primary_key, - 'citation': citation - }, - auth=auth, - save=False - ) - if save: - self.save() - - def add_log(self, action, params, auth, foreign_user=None, log_date=None, save=True): - user = auth.user if auth else None - params['node'] = params.get('node') or params.get('project') or self._id - log = NodeLog( - action=action, - user=user, - foreign_user=foreign_user, - params=params, - node=self, - original_node=params['node'] - ) - - if log_date: - log.date = log_date - log.save() - - if len(self.logs) == 1: - self.date_modified = log.date.replace(tzinfo=None) - else: - self.date_modified = self.logs.latest().date.replace(tzinfo=None) - - if save: - self.save() - if user and not self.is_collection: - increment_user_activity_counters(user._primary_key, action, log.date.isoformat()) - return log - - @classmethod - def find_for_user(cls, user, subquery=None): - combined_query = Q('contributors', 'eq', user._id) - - if subquery is not None: - combined_query = combined_query & subquery - return cls.find(combined_query) - - @property - def url(self): - return '/{}/'.format(self._primary_key) - - def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs): - return web_url_for(view_name, pid=self._primary_key, _absolute=_absolute, _guid=_guid, *args, **kwargs) - - def api_url_for(self, view_name, _absolute=False, *args, **kwargs): - return api_url_for(view_name, pid=self._primary_key, _absolute=_absolute, *args, **kwargs) - - @property - def absolute_url(self): - if not self.url: - logger.error('Node {0} has a parent 
that is not a project'.format(self._id)) - return None - return urlparse.urljoin(settings.DOMAIN, self.url) - - @property - def display_absolute_url(self): - url = self.absolute_url - if url is not None: - return re.sub(r'https?:', '', url).strip('/') - - @property - def api_v2_url(self): - return reverse('nodes:node-detail', kwargs={'node_id': self._id, 'version': 'v2'}) - - @property - def absolute_api_v2_url(self): - if self.is_registration: - path = '/registrations/{}/'.format(self._id) - return api_v2_url(path) - if self.is_collection: - path = '/collections/{}/'.format(self._id) - return api_v2_url(path) - path = '/nodes/{}/'.format(self._id) - return api_v2_url(path) - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - - @property - def api_url(self): - if not self.url: - logger.error('Node {0} has a parent that is not a project'.format(self._id)) - return None - return '/api/v1{0}'.format(self.deep_url) - - @property - def deep_url(self): - return '/project/{}/'.format(self._primary_key) - - @property - def linked_nodes_self_url(self): - return self.absolute_api_v2_url + 'relationships/linked_nodes/' - - @property - def linked_registrations_self_url(self): - return self.absolute_api_v2_url + 'relationships/linked_registrations/' - - @property - def linked_nodes_related_url(self): - return self.absolute_api_v2_url + 'linked_nodes/' - - @property - def linked_registrations_related_url(self): - return self.absolute_api_v2_url + 'linked_registrations/' - - @property - def csl(self): # formats node information into CSL format for citation parsing - """a dict in CSL-JSON schema - - For details on this schema, see: - https://github.com/citation-style-language/schema#csl-json-schema - """ - csl = { - 'id': self._id, - 'title': sanitize.unescape_entities(self.title), - 'author': [ - contributor.csl_name # method in auth/model.py which parses the names of authors - for contributor in self.visible_contributors - ], - 
'publisher': 'Open Science Framework', - 'type': 'webpage', - 'URL': self.display_absolute_url, - } - - doi = self.get_identifier_value('doi') - if doi: - csl['DOI'] = doi - - if self.logs: - csl['issued'] = datetime_to_csl(self.logs.latest().date) - - return csl - - def author_list(self, and_delim='&'): - author_names = [ - author.biblio_name - for author in self.visible_contributors - if author - ] - if len(author_names) < 2: - return ' {0} '.format(and_delim).join(author_names) - if len(author_names) > 7: - author_names = author_names[:7] - author_names.append('et al.') - return ', '.join(author_names) - return u'{0}, {1} {2}'.format( - ', '.join(author_names[:-1]), - and_delim, - author_names[-1] - ) - - @property - def templated_list(self): - return Node.find(Q('template_node', 'eq', self._id) & Q('is_deleted', 'ne', True)) - - @property - def _parent_node(self): - """The parent node, if it exists, otherwise ``None``. Note: this - property is named `parent_node` rather than `parent` to avoid a - conflict with the `parent` back-reference created by the `nodes` - field on this schema. 
- """ - try: - if not self.node__parent[0].is_deleted: - return self.node__parent[0] - except IndexError: - pass - return None - - @_parent_node.setter - def _parent_node(self, parent): - parent.nodes.append(self) - parent.save() - - def _get_parent(self): - try: - return self.node__parent[0] - except IndexError: - pass - return None - - @property - def _root(self): - parent = self._get_parent() - if parent: - return parent._root - else: - return self - - @property - def archiving(self): - job = self.archive_job - return job and not job.done and not job.archive_tree_finished() - - @property - def archive_job(self): - return self.archivejob__active[0] if self.archivejob__active else None - - @property - def registrations_all(self): - return Node.find(Q('registered_from', 'eq', self._id)) - - @property - def registrations(self): - # TODO: This method may be totally unused - return Node.find(Q('registered_from', 'eq', self._id) & Q('archiving', 'eq', False)) - - @property - def watch_url(self): - return os.path.join(self.api_url, 'watch/') - - @property - def parent_id(self): - if self.node__parent: - return self.node__parent[0]._primary_key - return None - - @property - def forked_from_id(self): - if self.forked_from: - return self.forked_from._id - return None - - @property - def registered_schema_id(self): - if self.registered_schema: - return self.registered_schema[0]._id - return None - - @property - def project_or_component(self): - # The distinction is drawn based on whether something has a parent node, rather than by category - return 'project' if not self.parent_node else 'component' - - def is_contributor(self, user): - return ( - user is not None - and ( - user._id in self.contributors - ) - ) - - def add_addon(self, addon_name, auth, log=True, *args, **kwargs): - """Add an add-on to the node. Do nothing if the addon is already - enabled. 
- - :param str addon_name: Name of add-on - :param Auth auth: Consolidated authorization object - :param bool log: Add a log after adding the add-on - :return: A boolean, whether the addon was added - """ - ret = AddonModelMixin.add_addon(self, addon_name, auth=auth, - *args, **kwargs) - if ret and log: - config = settings.ADDONS_AVAILABLE_DICT[addon_name] - self.add_log( - action=NodeLog.ADDON_ADDED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'addon': config.full_name, - }, - auth=auth, - save=False, - ) - self.save() # TODO: here, or outside the conditional? @mambocab - return ret - - def delete_addon(self, addon_name, auth, _force=False): - """Delete an add-on from the node. - - :param str addon_name: Name of add-on - :param Auth auth: Consolidated authorization object - :param bool _force: For migration testing ONLY. Do not set to True - in the application, or else projects will be allowed to delete - mandatory add-ons! - :return bool: Add-on was deleted - """ - ret = super(Node, self).delete_addon(addon_name, auth, _force) - if ret: - config = settings.ADDONS_AVAILABLE_DICT[addon_name] - self.add_log( - action=NodeLog.ADDON_REMOVED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'addon': config.full_name, - }, - auth=auth, - save=False, - ) - self.save() - # TODO: save here or outside the conditional? @mambocab - return ret - - def callback(self, callback, recursive=False, *args, **kwargs): - """Invoke callbacks of attached add-ons and collect messages. 
- - :param str callback: Name of callback method to invoke - :param bool recursive: Apply callback recursively over nodes - :return list: List of callback messages - """ - messages = [] - - for addon in self.get_addons(): - method = getattr(addon, callback) - message = method(self, *args, **kwargs) - if message: - messages.append(message) - - if recursive: - for child in self.nodes: - if not child.is_deleted: - messages.extend( - child.callback( - callback, recursive, *args, **kwargs - ) - ) - - return messages - - def replace_contributor(self, old, new): - for i, contrib in enumerate(self.contributors): - if contrib._primary_key == old._primary_key: - self.contributors[i] = new - # Remove unclaimed record for the project - if self._primary_key in old.unclaimed_records: - del old.unclaimed_records[self._primary_key] - old.save() - for permission in self.get_permissions(old): - self.add_permission(new, permission) - self.permissions.pop(old._id) - if old._id in self.visible_contributor_ids: - self.visible_contributor_ids[self.visible_contributor_ids.index(old._id)] = new._id - return True - return False - - def remove_contributor(self, contributor, auth, log=True): - """Remove a contributor from this node. - - :param contributor: User object, the contributor to be removed - :param auth: All the auth information including user, API key. 
- """ - # remove unclaimed record if necessary - if self._primary_key in contributor.unclaimed_records: - del contributor.unclaimed_records[self._primary_key] - - self.contributors.remove(contributor._id) - - self.clear_permission(contributor) - if contributor._id in self.visible_contributor_ids: - self.visible_contributor_ids.remove(contributor._id) - - if not self.visible_contributor_ids: - return False - - # Node must have at least one registered admin user - admins = list(self.get_admin_contributors(self.contributors)) - if not admins: - return False - - # Clear permissions for removed user - self.permissions.pop(contributor._id, None) - - # After remove callback - for addon in self.get_addons(): - message = addon.after_remove_contributor(self, contributor, auth) - if message: - # Because addons can return HTML strings, addons are responsible for markupsafe-escaping any messages returned - status.push_status_message(message, kind='info', trust=True) - - if log: - self.add_log( - action=NodeLog.CONTRIB_REMOVED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'contributors': [contributor._id], - }, - auth=auth, - save=False, - ) - - self.save() - - #send signal to remove this user from project subscriptions - project_signals.contributor_removed.send(self, user=contributor) - - return True - - def remove_contributors(self, contributors, auth=None, log=True, save=False): - - results = [] - removed = [] - - for contrib in contributors: - outcome = self.remove_contributor( - contributor=contrib, auth=auth, log=False, - ) - results.append(outcome) - removed.append(contrib._id) - if log: - self.add_log( - action=NodeLog.CONTRIB_REMOVED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'contributors': removed, - }, - auth=auth, - save=False, - ) - - if save: - self.save() - - return all(results) - - def move_contributor(self, user, auth, index, save=False): - if not self.has_permission(auth.user, ADMIN): - raise 
PermissionsError('Only admins can modify contributor order') - old_index = self.contributors.index(user) - self.contributors.insert(index, self.contributors.pop(old_index)) - self.add_log( - action=NodeLog.CONTRIB_REORDERED, - params={ - 'project': self.parent_id, - 'node': self._id, - 'contributors': [ - user._id - ], - }, - auth=auth, - save=False, - ) - if save: - self.save() - - def update_contributor(self, user, permission, visible, auth, save=False): - """ TODO: this method should be updated as a replacement for the main loop of - Node#manage_contributors. Right now there are redundancies, but to avoid major - feature creep this will not be included as this time. - - Also checks to make sure unique admin is not removing own admin privilege. - """ - if not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can modify contributor permissions') - - if permission: - permissions = expand_permissions(permission) - admins = [contrib for contrib in self.contributors if - self.has_permission(contrib, 'admin') and contrib.is_active] - if not len(admins) > 1: - # has only one admin - admin = admins[0] - if admin == user and ADMIN not in permissions: - raise NodeStateError('{} is the only admin.'.format(user.fullname)) - if user not in self.contributors: - raise ValueError( - 'User {0} not in contributors'.format(user.fullname) - ) - if set(permissions) != set(self.get_permissions(user)): - self.set_permissions(user, permissions, save=save) - permissions_changed = { - user._id: permissions - } - self.add_log( - action=NodeLog.PERMISSIONS_UPDATED, - params={ - 'project': self.parent_id, - 'node': self._id, - 'contributors': permissions_changed, - }, - auth=auth, - save=save - ) - with TokuTransaction(): - if ['read'] in permissions_changed.values(): - project_signals.write_permissions_revoked.send(self) - if visible is not None: - self.set_visible(user, visible, auth=auth) - self.update_visible_ids(save=save) - - def manage_contributors(self, 
user_dicts, auth, save=False): - """Reorder and remove contributors. - - :param list user_dicts: Ordered list of contributors represented as - dictionaries of the form: - {'id': , 'permission': , 'visible': bool} - :param Auth auth: Consolidated authentication information - :param bool save: Save changes - :raises: ValueError if any users in `users` not in contributors or if - no admin contributors remaining - """ - with TokuTransaction(): - users = [] - user_ids = [] - permissions_changed = {} - visibility_removed = [] - to_retain = [] - to_remove = [] - for user_dict in user_dicts: - user = User.load(user_dict['id']) - if user is None: - raise ValueError('User not found') - if user not in self.contributors: - raise ValueError( - 'User {0} not in contributors'.format(user.fullname) - ) - permissions = expand_permissions(user_dict['permission']) - if set(permissions) != set(self.get_permissions(user)): - # Validate later - self.set_permissions(user, permissions, validate=False, save=False) - permissions_changed[user._id] = permissions - # visible must be added before removed to ensure they are validated properly - if user_dict['visible']: - self.set_visible(user, - visible=True, - auth=auth) - else: - visibility_removed.append(user) - users.append(user) - user_ids.append(user_dict['id']) - - for user in visibility_removed: - self.set_visible(user, - visible=False, - auth=auth) - - for user in self.contributors: - if user._id in user_ids: - to_retain.append(user) - else: - to_remove.append(user) - - admins = list(self.get_admin_contributors(users)) - if users is None or not admins: - raise NodeStateError( - 'Must have at least one registered admin contributor' - ) - - if to_retain != users: - self.add_log( - action=NodeLog.CONTRIB_REORDERED, - params={ - 'project': self.parent_id, - 'node': self._id, - 'contributors': [ - user._id - for user in users - ], - }, - auth=auth, - save=False, - ) - - if to_remove: - self.remove_contributors(to_remove, auth=auth, 
save=False) - - self.contributors = users - - if permissions_changed: - self.add_log( - action=NodeLog.PERMISSIONS_UPDATED, - params={ - 'project': self.parent_id, - 'node': self._id, - 'contributors': permissions_changed, - }, - auth=auth, - save=False, - ) - # Update list of visible IDs - self.update_visible_ids() - if save: - self.save() - - with TokuTransaction(): - if to_remove or permissions_changed and ['read'] in permissions_changed.values(): - project_signals.write_permissions_revoked.send(self) - - def add_contributor(self, contributor, permissions=None, visible=True, - send_email='default', auth=None, log=True, save=False): - """Add a contributor to the project. - - :param User contributor: The contributor to be added - :param list permissions: Permissions to grant to the contributor - :param bool visible: Contributor is visible in project dashboard - :param str send_email: Email preference for notifying added contributor - :param Auth auth: All the auth information including user, API key - :param bool log: Add log to self - :param bool save: Save after adding contributor - :returns: Whether contributor was added - """ - MAX_RECENT_LENGTH = 15 - - # If user is merged into another account, use master account - contrib_to_add = contributor.merged_by if contributor.is_merged else contributor - if contrib_to_add not in self.contributors: - - self.contributors.append(contrib_to_add) - if visible: - self.set_visible(contrib_to_add, visible=True, log=False) - - # Add default contributor permissions - permissions = permissions or DEFAULT_CONTRIBUTOR_PERMISSIONS - for permission in permissions: - self.add_permission(contrib_to_add, permission, save=False) - - # Add contributor to recently added list for user - if auth is not None: - user = auth.user - if not self.has_permission(user, ADMIN): - raise PermissionsError('Must be an admin to add contributors.') - if contrib_to_add in user.recently_added: - user.recently_added.remove(contrib_to_add) - 
user.recently_added.insert(0, contrib_to_add) - while len(user.recently_added) > MAX_RECENT_LENGTH: - user.recently_added.pop() - - if log: - self.add_log( - action=NodeLog.CONTRIB_ADDED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'contributors': [contrib_to_add._primary_key], - }, - auth=auth, - save=False, - ) - if save: - self.save() - - if self._id and send_email != 'false': - project_signals.contributor_added.send(self, contributor=contributor, auth=auth, email_template=send_email) - - return True - - # Permissions must be overridden if changed when contributor is added to parent he/she is already on a child of. - elif contrib_to_add in self.contributors and permissions is not None: - if auth is not None: - if not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Must be an admin to edit contributor permissions.') - self.set_permissions(contrib_to_add, permissions) - if save: - self.save() - - return False - else: - return False - - def add_contributors(self, contributors, auth=None, log=True, save=False): - """Add multiple contributors - - :param list contributors: A list of dictionaries of the form: - { - 'user': , - 'permissions': , - 'visible': - } - :param auth: All the auth information including user, API key. 
- :param log: Add log to self - :param save: Save after adding contributor - """ - for contrib in contributors: - self.add_contributor( - contributor=contrib['user'], permissions=contrib['permissions'], - visible=contrib['visible'], auth=auth, log=False, save=False, - ) - if log and contributors: - self.add_log( - action=NodeLog.CONTRIB_ADDED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'contributors': [ - contrib['user']._id - for contrib in contributors - ], - }, - auth=auth, - save=False, - ) - if save: - self.save() - - def add_unregistered_contributor(self, fullname, email, auth, send_email='default', visible=True, permissions=None, save=False, existing_user=None): - """Add a non-registered contributor to the project. - - :param str fullname: The full name of the person. - :param str email: The email address of the person. - :param Auth auth: Auth object for the user adding the contributor. - :param User existing_user: the unregister_contributor if it is already created, otherwise None - :returns: The added contributor - :raises: DuplicateEmailError if user with given email is already in the database. - """ - # Create a new user record if you weren't passed an existing_user - contributor = existing_user if existing_user else User.create_unregistered(fullname=fullname, email=email) - - contributor.add_unclaimed_record(node=self, referrer=auth.user, - given_name=fullname, email=email) - try: - contributor.save() - except ValidationValueError: # User with same email already exists - contributor = get_user(email=email) - # Unregistered users may have multiple unclaimed records, so - # only raise error if user is registered. 
- if contributor.is_registered or self.is_contributor(contributor): - raise - contributor.add_unclaimed_record(node=self, referrer=auth.user, - given_name=fullname, email=email) - contributor.save() - - self.add_contributor( - contributor, permissions=permissions, auth=auth, - visible=visible, send_email=send_email, log=True, save=False - ) - self.save() - return contributor - - def add_contributor_registered_or_not(self, auth, user_id=None, full_name=None, email=None, send_email='false', - permissions=None, bibliographic=True, index=None, save=False): - - if user_id: - contributor = User.load(user_id) - if not contributor: - raise ValueError('User with id {} was not found.'.format(user_id)) - if contributor in self.contributors: - raise ValidationValueError('{} is already a contributor.'.format(contributor.fullname)) - self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic, - permissions=permissions, send_email=send_email, save=True) - else: - - try: - contributor = self.add_unregistered_contributor(fullname=full_name, email=email, auth=auth, - send_email=send_email, permissions=permissions, - visible=bibliographic, save=True) - except ValidationValueError: - contributor = get_user(email=email) - if contributor in self.contributors: - raise ValidationValueError('{} is already a contributor.'.format(contributor.fullname)) - self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic, - send_email=send_email, permissions=permissions, save=True) - - auth.user.email_last_sent = timezone.now() - auth.user.save() - - if index is not None: - self.move_contributor(user=contributor, index=index, auth=auth, save=True) - - contributor.permission = reduce_permissions(self.get_permissions(contributor)) - contributor.bibliographic = self.get_visible(contributor) - contributor.node_id = self._id - contributor.index = self.contributors.index(contributor) - - if save: - contributor.save() - - return contributor - - def 
_get_spam_content(self, saved_fields): - from addons.wiki.models import NodeWikiPage - spam_fields = self.SPAM_CHECK_FIELDS if self.is_public and 'is_public' in saved_fields else self.SPAM_CHECK_FIELDS.intersection(saved_fields) - content = [] - for field in spam_fields: - if field == 'wiki_pages_current': - newest_wiki_page = None - for wiki_page_id in self.wiki_pages_current.values(): - wiki_page = NodeWikiPage.load(wiki_page_id) - if not newest_wiki_page: - newest_wiki_page = wiki_page - elif wiki_page.date > newest_wiki_page.date: - newest_wiki_page = wiki_page - if newest_wiki_page: - content.append(newest_wiki_page.raw_text(self).encode('utf-8')) - else: - content.append((getattr(self, field, None) or '').encode('utf-8')) - if not content: - return None - return ' '.join(content) - - def check_spam(self, user, saved_fields, request_headers): - if not settings.SPAM_CHECK_ENABLED: - return False - if settings.SPAM_CHECK_PUBLIC_ONLY and not self.is_public: - return False - if 'ham_confirmed' in user.system_tags: - return False - - content = self._get_spam_content(saved_fields) - if not content: - return - is_spam = self.do_check_spam( - user.fullname, - user.username, - content, - request_headers - ) - logger.info("Node ({}) '{}' smells like {} (tip: {})".format( - self._id, self.title.encode('utf-8'), 'SPAM' if is_spam else 'HAM', self.spam_pro_tip - )) - if is_spam: - self._check_spam_user(user) - return is_spam - - def _check_spam_user(self, user): - if ( - settings.SPAM_ACCOUNT_SUSPENSION_ENABLED - and (timezone.now() - user.date_confirmed) <= settings.SPAM_ACCOUNT_SUSPENSION_THRESHOLD - ): - self.set_privacy('private', log=False, save=False) - - # Suspend the flagged user for spam. 
- if 'spam_flagged' not in user.system_tags: - user.system_tags.append('spam_flagged') - if not user.is_disabled: - user.disable_account() - user.is_registered = False - mails.send_mail(to_addr=user.username, mail=mails.SPAM_USER_BANNED, user=user) - user.save() - - # Make public nodes private from this contributor - for node in user.contributed: - if self._id != node._id and len(node.contributors) == 1 and node.is_public: - node.set_privacy('private', log=False, save=True) - - def flag_spam(self): - """ Overrides SpamMixin#flag_spam. - """ - super(Node, self).flag_spam() - if settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE: - self.set_privacy(Node.PRIVATE, auth=None, log=False, save=False, check_addons=False) - log = self.add_log( - action=NodeLog.MADE_PRIVATE, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - }, - auth=None, - save=False - ) - log.should_hide = True - log.save() - - def confirm_spam(self, save=False): - super(Node, self).confirm_spam(save=False) - self.set_privacy(Node.PRIVATE, auth=None, log=False, save=False, check_addons=False) - log = self.add_log( - action=NodeLog.MADE_PRIVATE, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - }, - auth=None, - save=False - ) - log.should_hide = True - log.save() - if save: - self.save() - - def set_privacy(self, permissions, auth=None, log=True, save=True, meeting_creation=False, check_addons=True): - """Set the permissions for this node. Also, based on meeting_creation, queues an email to user about abilities of - public projects. - - :param permissions: A string, either 'public' or 'private' - :param auth: All the auth information including user, API key. - :param bool log: Whether to add a NodeLog for the privacy change. - :param bool meeting_creation: Whether this was created due to a meetings email. - :param bool check_addons: Check and collect messages for addons? 
- """ - if auth and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Must be an admin to change privacy settings.') - if permissions == 'public' and not self.is_public: - if self.is_spam or (settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE and self.is_spammy): - # TODO: Should say will review within a certain agreed upon time period. - raise NodeStateError('This project has been marked as spam. Please contact the help desk if you think this is in error.') - if self.is_registration: - if self.is_pending_embargo: - raise NodeStateError('A registration with an unapproved embargo cannot be made public.') - elif self.is_pending_registration: - raise NodeStateError('An unapproved registration cannot be made public.') - elif self.is_pending_embargo: - raise NodeStateError('An unapproved embargoed registration cannot be made public.') - elif self.is_embargoed: - # Embargoed registrations can be made public early - self.request_embargo_termination(auth=auth) - return False - self.is_public = True - self.keenio_read_key = self.generate_keenio_read_key() - elif permissions == 'private' and self.is_public: - if self.is_registration and not self.is_pending_embargo: - raise NodeStateError('Public registrations must be withdrawn, not made private.') - else: - self.is_public = False - self.keenio_read_key = '' - else: - return False - - # After set permissions callback - if check_addons: - for addon in self.get_addons(): - message = addon.after_set_privacy(self, permissions) - if message: - status.push_status_message(message, kind='info', trust=False) - - if log: - action = NodeLog.MADE_PUBLIC if permissions == 'public' else NodeLog.MADE_PRIVATE - self.add_log( - action=action, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - }, - auth=auth, - save=False, - ) - if save: - self.save() - if auth and permissions == 'public': - project_signals.privacy_set_public.send(auth.user, node=self, meeting_creation=meeting_creation) - return True - - def 
admin_public_wiki(self, user): - return ( - self.has_addon('wiki') and - self.has_permission(user, 'admin') and - self.is_public - ) - - def include_wiki_settings(self, user): - """Check if node meets requirements to make publicly editable.""" - return ( - self.admin_public_wiki(user) or - any( - each.admin_public_wiki(user) - for each in self.get_descendants_recursive() - ) - ) - - # TODO: Move to wiki add-on - def get_wiki_page(self, name=None, version=None, id=None): - from addons.wiki.models import NodeWikiPage - - if name: - name = (name or '').strip() - key = to_mongo_key(name) - try: - if version and (isinstance(version, int) or version.isdigit()): - id = self.wiki_pages_versions[key][int(version) - 1] - elif version == 'previous': - id = self.wiki_pages_versions[key][-2] - elif version == 'current' or version is None: - id = self.wiki_pages_current[key] - else: - return None - except (KeyError, IndexError): - return None - return NodeWikiPage.load(id) - - # TODO: Move to wiki add-on - def update_node_wiki(self, name, content, auth): - """Update the node's wiki page with new content. - - :param page: A string, the page's name, e.g. ``"home"``. - :param content: A string, the posted content. - :param auth: All the auth information including user, API key. 
- """ - from addons.wiki.models import NodeWikiPage - - name = (name or '').strip() - key = to_mongo_key(name) - has_comments = False - current = None - - if key not in self.wiki_pages_current: - if key in self.wiki_pages_versions: - version = len(self.wiki_pages_versions[key]) + 1 - else: - version = 1 - else: - current = NodeWikiPage.load(self.wiki_pages_current[key]) - version = current.version + 1 - current.save() - if Comment.find(Q('root_target', 'eq', current._id)).count() > 0: - has_comments = True - - new_page = NodeWikiPage( - page_name=name, - version=version, - user=auth.user, - node=self, - content=content - ) - new_page.save() - - if has_comments: - Comment.update(Q('root_target', 'eq', current._id), data={'root_target': Guid.load(new_page._id)}) - Comment.update(Q('target', 'eq', current._id), data={'target': Guid.load(new_page._id)}) - - if current: - for contrib in self.contributors: - if contrib.comments_viewed_timestamp.get(current._id, None): - contrib.comments_viewed_timestamp[new_page._id] = contrib.comments_viewed_timestamp[current._id] - contrib.save() - del contrib.comments_viewed_timestamp[current._id] - - # check if the wiki page already exists in versions (existed once and is now deleted) - if key not in self.wiki_pages_versions: - self.wiki_pages_versions[key] = [] - self.wiki_pages_versions[key].append(new_page._primary_key) - self.wiki_pages_current[key] = new_page._primary_key - - self.add_log( - action=NodeLog.WIKI_UPDATED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'page': new_page.page_name, - 'page_id': new_page._primary_key, - 'version': new_page.version, - }, - auth=auth, - log_date=new_page.date, - save=False, - ) - self.save() - - # TODO: Move to wiki add-on - def rename_node_wiki(self, name, new_name, auth): - """Rename the node's wiki page with new name. - - :param name: A string, the page's name, e.g. ``"My Page"``. - :param new_name: A string, the new page's name, e.g. ``"My Renamed Page"``. 
- :param auth: All the auth information including user, API key. - - """ - # TODO: Fix circular imports - from addons.wiki.exceptions import ( - PageCannotRenameError, - PageConflictError, - PageNotFoundError, - ) - - name = (name or '').strip() - key = to_mongo_key(name) - new_name = (new_name or '').strip() - new_key = to_mongo_key(new_name) - page = self.get_wiki_page(name) - - if key == 'home': - raise PageCannotRenameError('Cannot rename wiki home page') - if not page: - raise PageNotFoundError('Wiki page not found') - if (new_key in self.wiki_pages_current and key != new_key) or new_key == 'home': - raise PageConflictError( - 'Page already exists with name {0}'.format( - new_name, - ) - ) - - # rename the page first in case we hit a validation exception. - old_name = page.page_name - page.rename(new_name) - - # TODO: merge historical records like update (prevents log breaks) - # transfer the old page versions/current keys to the new name. - if key != new_key: - self.wiki_pages_versions[new_key] = self.wiki_pages_versions[key] - del self.wiki_pages_versions[key] - self.wiki_pages_current[new_key] = self.wiki_pages_current[key] - del self.wiki_pages_current[key] - if key in self.wiki_private_uuids: - self.wiki_private_uuids[new_key] = self.wiki_private_uuids[key] - del self.wiki_private_uuids[key] - - self.add_log( - action=NodeLog.WIKI_RENAMED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'page': page.page_name, - 'page_id': page._primary_key, - 'old_page': old_name, - 'version': page.version, - }, - auth=auth, - save=False, - ) - self.save() - - def delete_node_wiki(self, name, auth): - name = (name or '').strip() - key = to_mongo_key(name) - page = self.get_wiki_page(key) - - del self.wiki_pages_current[key] - if key != 'home': - del self.wiki_pages_versions[key] - - self.add_log( - action=NodeLog.WIKI_DELETED, - params={ - 'project': self.parent_id, - 'node': self._primary_key, - 'page': page.page_name, - 'page_id': 
page._primary_key, - }, - auth=auth, - save=False, - ) - self.save() - - def get_stats(self, detailed=False): - if detailed: - raise NotImplementedError( - 'Detailed stats exist, but are not yet implemented.' - ) - else: - return get_basic_counters('node:%s' % self._primary_key) - - # TODO: Deprecate this; it duplicates much of what serialize_project already - # does - def serialize(self, auth=None): - """Dictionary representation of node that is nested within a NodeLog's - representation. - """ - # TODO: incomplete implementation - return { - 'id': str(self._primary_key), - 'category': self.category_display, - 'node_type': self.project_or_component, - 'url': self.url, - # TODO: Titles shouldn't contain escaped HTML in the first place - 'title': sanitize.unescape_entities(self.title), - 'path': self.path_above(auth), - 'api_url': self.api_url, - 'is_public': self.is_public, - 'is_registration': self.is_registration, - } - - def _initiate_retraction(self, user, justification=None): - """Initiates the retraction process for a registration - :param user: User who initiated the retraction - :param justification: Justification, if given, for retraction - """ - - retraction = Retraction( - initiated_by=user, - justification=justification or None, # make empty strings None - state=Retraction.UNAPPROVED - ) - retraction.save() # Save retraction so it has a primary key - self.retraction = retraction - self.save() # Set foreign field reference Node.retraction - admins = self.get_admin_contributors_recursive(unique_users=True) - for (admin, node) in admins: - retraction.add_authorizer(admin, node) - retraction.save() # Save retraction approval state - return retraction - - def retract_registration(self, user, justification=None, save=True): - """Retract public registration. Instantiate new Retraction object - and associate it with the respective registration. 
- """ - - if not self.is_registration or (not self.is_public and not (self.embargo_end_date or self.is_pending_embargo)): - raise NodeStateError('Only public or embargoed registrations may be withdrawn.') - - if self.root_id != self.id: - raise NodeStateError('Withdrawal of non-parent registrations is not permitted.') - - retraction = self._initiate_retraction(user, justification) - self.registered_from.add_log( - action=NodeLog.RETRACTION_INITIATED, - params={ - 'node': self.registered_from._id, - 'registration': self._id, - 'retraction_id': retraction._id, - }, - auth=Auth(user), - ) - self.retraction = retraction - if save: - self.save() - return retraction - - def _is_embargo_date_valid(self, end_date): - today = timezone.now() - if (end_date - today) >= settings.EMBARGO_END_DATE_MIN: - if (end_date - today) <= settings.EMBARGO_END_DATE_MAX: - return True - return False - - def _initiate_embargo(self, user, end_date, for_existing_registration=False, notify_initiator_on_complete=False): - """Initiates the retraction process for a registration - :param user: User who initiated the retraction - :param end_date: Date when the registration should be made public - """ - embargo = Embargo( - initiated_by=user, - end_date=datetime.datetime.combine(end_date, datetime.datetime.min.time()), - for_existing_registration=for_existing_registration, - notify_initiator_on_complete=notify_initiator_on_complete - ) - embargo.save() # Save embargo so it has a primary key - self.embargo = embargo - self.save() # Set foreign field reference Node.embargo - admins = self.get_admin_contributors_recursive(unique_users=True) - for (admin, node) in admins: - embargo.add_authorizer(admin, node) - embargo.save() # Save embargo's approval_state - return embargo - - def embargo_registration(self, user, end_date, for_existing_registration=False, notify_initiator_on_complete=False): - """Enter registration into an embargo period at end of which, it will - be made public - :param user: User 
initiating the embargo - :param end_date: Date when the registration should be made public - :raises: NodeStateError if Node is not a registration - :raises: PermissionsError if user is not an admin for the Node - :raises: ValidationValueError if end_date is not within time constraints - """ - - if not self.is_registration: - raise NodeStateError('Only registrations may be embargoed') - if not self.has_permission(user, 'admin'): - raise PermissionsError('Only admins may embargo a registration') - if not self._is_embargo_date_valid(end_date): - if (end_date - timezone.now()) >= settings.EMBARGO_END_DATE_MIN: - raise ValidationValueError('Registrations can only be embargoed for up to four years.') - raise ValidationValueError('Embargo end date must be at least three days in the future.') - - embargo = self._initiate_embargo(user, end_date, for_existing_registration=for_existing_registration, notify_initiator_on_complete=notify_initiator_on_complete) - - self.registered_from.add_log( - action=NodeLog.EMBARGO_INITIATED, - params={ - 'node': self.registered_from._id, - 'registration': self._id, - 'embargo_id': embargo._id, - }, - auth=Auth(user), - save=True, - ) - if self.is_public: - self.set_privacy('private', Auth(user)) - - def request_embargo_termination(self, auth): - """Initiates an EmbargoTerminationApproval to lift this Embargoed Registration's - embargo early.""" - if not self.is_embargoed: - raise NodeStateError('This node is not under active embargo') - if not self.root == self: - raise NodeStateError('Only the root of an embargoed registration can request termination') - - approval = EmbargoTerminationApproval( - initiated_by=auth.user, - embargoed_registration=self, - ) - admins = [admin for admin in self.root.get_admin_contributors_recursive(unique_users=True)] - for (admin, node) in admins: - approval.add_authorizer(admin, node=node) - approval.save() - approval.ask(admins) - self.embargo_termination_approval = approval - self.save() - return approval - 
    def terminate_embargo(self, auth):
        """Handles the actual early termination of an Embargoed registration.
        Adds a log to the registered_from Node.

        :param auth: Auth of the user requesting termination (unused here;
            the log below is deliberately written with ``auth=None``).
        :raises NodeStateError: if this node is not currently embargoed.
        :return: True on success.
        """
        if not self.is_embargoed:
            raise NodeStateError('This node is not under active embargo')

        # Log against the original (non-registration) project, not this
        # registration copy.
        # NOTE(review): params uses both 'project' (this registration) and
        # 'node' (the registered_from id) — historical log-param shape.
        self.registered_from.add_log(
            action=NodeLog.EMBARGO_TERMINATED,
            params={
                'project': self._id,
                'node': self.registered_from_id,
                'registration': self._id,
            },
            auth=None,
            save=True
        )
        # Move the Embargo sanction into its terminal COMPLETED state.
        self.embargo.mark_as_completed()
        # Make this registration and every primary descendant public;
        # log=False so only the single EMBARGO_TERMINATED entry is recorded.
        for node in self.node_and_primary_descendants():
            node.set_privacy(
                Node.PUBLIC,
                auth=None,
                log=False,
                save=True
            )
        return True

    def get_active_contributors_recursive(self, unique_users=False, *args, **kwargs):
        """Yield (admin, node) tuples for this node and
        descendant nodes. Excludes contributors on node links and inactive users.

        :param bool unique_users: If True, a given admin will only be yielded once
            during iteration.
        """
        # Extra *args/**kwargs are forwarded to both the descendant walk and
        # active_contributors (e.g. query filters) — presumably intentional;
        # TODO confirm against callers.
        visited_user_ids = []
        for node in self.node_and_primary_descendants(*args, **kwargs):
            for contrib in node.active_contributors(*args, **kwargs):
                if unique_users:
                    # De-duplicate across the whole tree, keeping the first
                    # (contributor, node) pairing encountered.
                    if contrib._id not in visited_user_ids:
                        visited_user_ids.append(contrib._id)
                        yield (contrib, node)
                else:
                    yield (contrib, node)

    def get_admin_contributors_recursive(self, unique_users=False, *args, **kwargs):
        """Yield (admin, node) tuples for this node and
        descendant nodes. Excludes contributors on node links and inactive users.

        :param bool unique_users: If True, a given admin will only be yielded once
            during iteration.
        """
        visited_user_ids = []
        for node in self.node_and_primary_descendants(*args, **kwargs):
            for contrib in node.contributors:
                # Only active users holding ADMIN on that particular node.
                if node.has_permission(contrib, ADMIN) and contrib.is_active:
                    if unique_users:
                        if contrib._id not in visited_user_ids:
                            visited_user_ids.append(contrib._id)
                            yield (contrib, node)
                    else:
                        yield (contrib, node)

    def get_admin_contributors(self, users):
        """Return a set of all admin contributors for this node. Excludes contributors on node links and
        inactive users.
        """
        # Despite the docstring's "set", this returns a lazy generator over
        # the supplied users, filtered to active admins of this node.
        return (
            user for user in users
            if self.has_permission(user, 'admin') and
            user.is_active)

    def _initiate_approval(self, user, notify_initiator_on_complete=False):
        """Create and attach a RegistrationApproval sanction for this registration.

        :param user: User who initiated the approval.
        :param bool notify_initiator_on_complete: Email the initiator when the
            sanction completes.
        :return: the saved RegistrationApproval.
        """
        # Approvals auto-expire; window length comes from settings.
        end_date = timezone.now() + settings.REGISTRATION_APPROVAL_TIME
        approval = RegistrationApproval(
            initiated_by=user,
            end_date=end_date,
            notify_initiator_on_complete=notify_initiator_on_complete
        )
        approval.save()  # Save approval so it has a primary key
        self.registration_approval = approval
        self.save()  # Set foreign field reference Node.registration_approval
        # Every admin across the registration tree may approve/reject.
        admins = self.get_admin_contributors_recursive(unique_users=True)
        for (admin, node) in admins:
            approval.add_authorizer(admin, node=node)
        approval.save()  # Save approval's approval_state
        return approval

    def require_approval(self, user, notify_initiator_on_complete=False):
        """Put this registration into the pending-approval workflow.

        :param user: admin User initiating the approval.
        :raises NodeStateError: if this node is not a registration.
        :raises PermissionsError: if `user` is not an admin on this node.
        """
        if not self.is_registration:
            raise NodeStateError('Only registrations can require registration approval')
        if not self.has_permission(user, 'admin'):
            raise PermissionsError('Only admins can initiate a registration approval')

        approval = self._initiate_approval(user, notify_initiator_on_complete)

        # Log on the source project the registration was made from.
        self.registered_from.add_log(
            action=NodeLog.REGISTRATION_APPROVAL_INITIATED,
            params={
                'node': self.registered_from_id,
                'registration': self._id,
                'registration_approval_id': approval._id,
            },
            auth=Auth(user),
            save=True,
        )

    @property
    def watches(self):
        # All WatchConfig records pointing at this node (modular-odm query).
        return WatchConfig.find(Q('node', 'eq', self._id))
institution_id = fields.StringField(unique=True, index=True) - institution_domains = fields.StringField(list=True) - institution_auth_url = fields.StringField(validate=URLValidator()) - institution_logout_url = fields.StringField(validate=URLValidator()) - institution_logo_name = fields.StringField() - institution_email_domains = fields.StringField(list=True) - institution_banner_name = fields.StringField() - - @classmethod - def find(cls, query=None, allow_institution=False, **kwargs): - if not allow_institution: - query = (query & Q('institution_id', 'eq', None)) if query else Q('institution_id', 'eq', None) - return super(Node, cls).find(query, **kwargs) - - @classmethod - def find_one(cls, query=None, allow_institution=False, **kwargs): - if not allow_institution: - query = (query & Q('institution_id', 'eq', None)) if query else Q('institution_id', 'eq', None) - return super(Node, cls).find_one(query, **kwargs) - - @classmethod - def find_by_institutions(cls, inst, query=None): - inst_node = inst.node - query = query & Q('_affiliated_institutions', 'eq', inst_node) if query else Q('_affiliated_institutions', 'eq', inst_node) - return cls.find(query, allow_institution=True) - - _affiliated_institutions = fields.ForeignField('node', list=True) - - @property - def affiliated_institutions(self): - ''' - Should behave as if this was a foreign field pointing to Institution - :return: this node's _affiliated_institutions wrapped with Institution as a list. 
- ''' - return AffiliatedInstitutionsList([Institution(node) for node in self._affiliated_institutions], obj=self, private_target='_affiliated_institutions') - - def add_affiliated_institution(self, inst, user, save=False, log=True): - if not user.is_affiliated_with_institution(inst): - raise UserNotAffiliatedError('User is not affiliated with {}'.format(inst.name)) - if inst not in self.affiliated_institutions: - self.affiliated_institutions.append(inst) - if log: - self.add_log( - action=NodeLog.AFFILIATED_INSTITUTION_ADDED, - params={ - 'node': self._primary_key, - 'institution': { - 'id': inst._id, - 'name': inst.name - } - }, - auth=Auth(user) - ) - if save: - self.save() - return True - - def remove_affiliated_institution(self, inst, user, save=False, log=True): - if inst in self.affiliated_institutions: - self.affiliated_institutions.remove(inst) - if log: - self.add_log( - action=NodeLog.AFFILIATED_INSTITUTION_REMOVED, - params={ - 'node': self._primary_key, - 'institution': { - 'id': inst._id, - 'name': inst.name - } - }, - auth=Auth(user) - ) - if save: - self.save() - return True - return False - - def institutions_url(self): - return self.absolute_api_v2_url + 'institutions/' - - def institutions_relationship_url(self): - return self.absolute_api_v2_url + 'relationships/institutions/' - - -@Node.subscribe('before_save') -def validate_permissions(schema, instance): - """Ensure that user IDs in `contributors` and `permissions` match. 
- - """ - node = instance - contributor_ids = set([user._id for user in node.contributors]) - permission_ids = set(node.permissions.keys()) - mismatched_contributors = contributor_ids.difference(permission_ids) - if mismatched_contributors: - raise ValidationValueError( - 'Contributors {0} missing from `permissions` on node {1}'.format( - ', '.join(mismatched_contributors), - node._id, - ) - ) - mismatched_permissions = permission_ids.difference(contributor_ids) - if mismatched_permissions: - raise ValidationValueError( - 'Permission keys {0} missing from `contributors` on node {1}'.format( - ', '.join(mismatched_contributors), - node._id, - ) - ) - - -@Node.subscribe('before_save') -def validate_visible_contributors(schema, instance): - """Ensure that user IDs in `contributors` and `visible_contributor_ids` - match. - - """ - node = instance - for user_id in node.visible_contributor_ids: - if user_id not in node.contributors: - raise ValidationValueError( - ('User {0} is in `visible_contributor_ids` but not in ' - '`contributors` on node {1}').format( - user_id, - node._id, - ) - ) - - -class WatchConfig(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - node = fields.ForeignField('Node') - digest = fields.BooleanField(default=False) - immediate = fields.BooleanField(default=False) - - def __repr__(self): - return ''.format(self=self) - - -class PrivateLink(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow) - key = fields.StringField(required=True, unique=True) - name = fields.StringField() - is_deleted = fields.BooleanField(default=False) - anonymous = fields.BooleanField(default=False) - - nodes = fields.ForeignField('node', list=True) - creator = fields.ForeignField('user') - - @property - def node_ids(self): - node_ids = [node._id for node in self.nodes] - return node_ids - - def node_scale(self, node): 
- # node may be None if previous node's parent is deleted - if node is None or node.parent_id not in self.node_ids: - return -40 - else: - offset = 20 if node.parent_node is not None else 0 - return offset + self.node_scale(node.parent_node) - - def to_json(self): - return { - 'id': self._id, - 'date_created': iso8601format(self.date_created), - 'key': self.key, - 'name': sanitize.unescape_entities(self.name), - 'creator': {'fullname': self.creator.fullname, 'url': self.creator.profile_url}, - 'nodes': [{'title': x.title, 'url': x.url, 'scale': str(self.node_scale(x)) + 'px', 'category': x.category} - for x in self.nodes if not x.is_deleted], - 'anonymous': self.anonymous - } - - -class AlternativeCitation(StoredObject): - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - name = fields.StringField(required=True, validate=MaxLengthValidator(256)) - text = fields.StringField(required=True, validate=MaxLengthValidator(2048)) - - def to_json(self): - return { - 'id': self._id, - 'name': self.name, - 'text': self.text - } - - -class DraftRegistrationLog(StoredObject): - """ Simple log to show status changes for DraftRegistrations - - field - _id - primary key - field - date - date of the action took place - field - action - simple action to track what happened - field - user - user who did the action - """ - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - date = fields.DateTimeField(default=datetime.datetime.utcnow) - action = fields.StringField() - draft = fields.ForeignField('draftregistration', index=True) - user = fields.ForeignField('user') - - SUBMITTED = 'submitted' - REGISTERED = 'registered' - APPROVED = 'approved' - REJECTED = 'rejected' - - def __repr__(self): - return ('').format(self=self) - - -class DraftRegistration(StoredObject): - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - - URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/drafts/{draft_id}' - - datetime_initiated = 
fields.DateTimeField(auto_now_add=True) - datetime_updated = fields.DateTimeField(auto_now=True) - # Original Node a draft registration is associated with - branched_from = fields.ForeignField('node', index=True) - - initiator = fields.ForeignField('user', index=True) - - # Dictionary field mapping question id to a question's comments and answer - # { - # : { - # 'comments': [{ - # 'user': { - # 'id': , - # 'name': - # }, - # value: , - # lastModified: - # }], - # 'value': - # } - # } - registration_metadata = fields.DictionaryField(default=dict) - registration_schema = fields.ForeignField('metaschema') - registered_node = fields.ForeignField('node', index=True) - - approval = fields.ForeignField('draftregistrationapproval', default=None) - - # Dictionary field mapping extra fields defined in the MetaSchema.schema to their - # values. Defaults should be provided in the schema (e.g. 'paymentSent': false), - # and these values are added to the DraftRegistration - _metaschema_flags = fields.DictionaryField(default=None) - - def __repr__(self): - return ''.format(self=self) - - # lazily set flags - @property - def flags(self): - if not self._metaschema_flags: - self._metaschema_flags = {} - meta_schema = self.registration_schema - if meta_schema: - schema = meta_schema.schema - flags = schema.get('flags', {}) - dirty = False - for flag, value in flags.iteritems(): - if flag not in self._metaschema_flags: - self._metaschema_flags[flag] = value - dirty = True - if dirty: - self.save() - return self._metaschema_flags - - @flags.setter - def flags(self, flags): - self._metaschema_flags.update(flags) - - notes = fields.StringField() - - @property - def url(self): - return self.URL_TEMPLATE.format( - node_id=self.branched_from, - draft_id=self._id - ) - - @property - def absolute_url(self): - return urlparse.urljoin(settings.DOMAIN, self.url) - - @property - def absolute_api_v2_url(self): - node = self.branched_from - path = 
'/nodes/{}/draft_registrations/{}/'.format(node._id, self._id) - return api_v2_url(path) - - # used by django and DRF - def get_absolute_url(self): - return self.absolute_api_v2_url - - @property - def requires_approval(self): - return self.registration_schema.requires_approval - - @property - def is_pending_review(self): - return self.approval.is_pending_approval if (self.requires_approval and self.approval) else False - - @property - def is_approved(self): - if self.requires_approval: - if not self.approval: - return False - else: - return self.approval.is_approved - else: - return False - - @property - def is_rejected(self): - if self.requires_approval: - if not self.approval: - return False - else: - return self.approval.is_rejected - else: - return False - - @property - def status_logs(self): - """ List of logs associated with this node""" - return DraftRegistrationLog.find(Q('draft', 'eq', self._id)).sort('date') - - @classmethod - def create_from_node(cls, node, user, schema, data=None): - draft = cls( - initiator=user, - branched_from=node, - registration_schema=schema, - registration_metadata=data or {}, - ) - draft.save() - return draft - - def update_metadata(self, metadata): - changes = [] - # Prevent comments on approved drafts - if not self.is_approved: - for question_id, value in metadata.iteritems(): - old_value = self.registration_metadata.get(question_id) - if old_value: - old_comments = { - comment['created']: comment - for comment in old_value.get('comments', []) - } - new_comments = { - comment['created']: comment - for comment in value.get('comments', []) - } - old_comments.update(new_comments) - metadata[question_id]['comments'] = sorted( - old_comments.values(), - key=lambda c: c['created'] - ) - if old_value.get('value') != value.get('value'): - changes.append(question_id) - else: - changes.append(question_id) - self.registration_metadata.update(metadata) - return changes - - def submit_for_review(self, initiated_by, meta, save=False): - 
approval = DraftRegistrationApproval( - initiated_by=initiated_by, - meta=meta - ) - approval.save() - self.approval = approval - self.add_status_log(initiated_by, DraftRegistrationLog.SUBMITTED) - if save: - self.save() - - def register(self, auth, save=False): - node = self.branched_from - - # Create the registration - register = node.register_node( - schema=self.registration_schema, - auth=auth, - data=self.registration_metadata - ) - self.registered_node = register - self.add_status_log(auth.user, DraftRegistrationLog.REGISTERED) - if save: - self.save() - return register - - def approve(self, user): - self.approval.approve(user) - self.add_status_log(user, DraftRegistrationLog.APPROVED) - self.approval.save() - - def reject(self, user): - self.approval.reject(user) - self.add_status_log(user, DraftRegistrationLog.REJECTED) - self.approval.save() - - def add_status_log(self, user, action): - log = DraftRegistrationLog(action=action, user=user, draft=self) - log.save() - - def validate_metadata(self, *args, **kwargs): - """ - Validates draft's metadata - """ - return self.registration_schema.validate_metadata(*args, **kwargs) diff --git a/website/project/sanctions.py b/website/project/sanctions.py deleted file mode 100644 index 9461af58b240..000000000000 --- a/website/project/sanctions.py +++ /dev/null @@ -1,1009 +0,0 @@ -import functools -from dateutil.parser import parse as parse_date -from django.utils import timezone - -from modularodm import ( - fields, - Q, -) -from modularodm.exceptions import NoResultsFound -from modularodm.validators import MaxLengthValidator - -from framework.auth import Auth -from framework.exceptions import PermissionsError -from framework.mongo import ( - ObjectId, - StoredObject, - validators, -) - -from website import ( - mails, - settings, - tokens, -) -from website.exceptions import ( - InvalidSanctionApprovalToken, - InvalidSanctionRejectionToken, - NodeStateError, -) -from website.prereg import utils as prereg_utils - 
-VIEW_PROJECT_URL_TEMPLATE = settings.DOMAIN + '{node_id}/' - -class Sanction(StoredObject): - """Sanction class is a generic way to track approval states""" - # Tell modularodm not to attach backends - _meta = { - 'abstract': True, - } - - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - - # Neither approved not cancelled - UNAPPROVED = 'unapproved' - # Has approval - APPROVED = 'approved' - # Rejected by at least one person - REJECTED = 'rejected' - # Embargo has been completed - COMPLETED = 'completed' - - state = fields.StringField( - default=UNAPPROVED, - validate=validators.choice_in(( - UNAPPROVED, - APPROVED, - REJECTED, - COMPLETED, - )) - ) - - DISPLAY_NAME = 'Sanction' - # SHORT_NAME must correspond with the associated foreign field to query against, - # e.g. Node.find_one(Q(sanction.SHORT_NAME, 'eq', sanction)) - SHORT_NAME = 'sanction' - - APPROVAL_NOT_AUTHORIZED_MESSAGE = 'This user is not authorized to approve this {DISPLAY_NAME}' - APPROVAL_INVALID_TOKEN_MESSAGE = 'Invalid approval token provided for this {DISPLAY_NAME}.' - REJECTION_NOT_AUTHORIZED_MESSAEGE = 'This user is not authorized to reject this {DISPLAY_NAME}' - REJECTION_INVALID_TOKEN_MESSAGE = 'Invalid rejection token provided for this {DISPLAY_NAME}.' 
- - # Controls whether or not the Sanction needs unanimous approval or just a single approval - ANY = 'any' - UNANIMOUS = 'unanimous' - mode = UNANIMOUS - - initiation_date = fields.DateTimeField(auto_now_add=timezone.now) - # Expiration date-- Sanctions in the UNAPPROVED state that are older than their end_date - # are automatically made ACTIVE by a daily cron job - # Use end_date=None for a non-expiring Sanction - end_date = fields.DateTimeField(default=None) - - # Sanction subclasses must have an initiated_by field - # initiated_by = fields.ForeignField('user', backref='initiated') - - # Expanded: Dictionary field mapping admin IDs their approval status and relevant tokens: - # { - # 'b3k97': { - # 'has_approved': False, - # 'approval_token': 'Pew7wj1Puf7DENUPFPnXSwa1rf3xPN', - # 'rejection_token': 'TwozClTFOic2PYxHDStby94bCQMwJy'} - # } - approval_state = fields.DictionaryField() - - def __repr__(self): - return ''.format(self=self) - - @property - def is_pending_approval(self): - return self.state == Sanction.UNAPPROVED - - @property - def is_approved(self): - return self.state == Sanction.APPROVED - - @property - def is_rejected(self): - return self.state == Sanction.REJECTED - - def approve(self, user): - raise NotImplementedError('Sanction subclasses must implement an approve method.') - - def reject(self, user): - raise NotImplementedError('Sanction subclasses must implement an approve method.') - - def _on_reject(self, user): - """Callback for rejection of a Sanction - - :param User user: - """ - raise NotImplementedError('Sanction subclasses must implement an #_on_reject method') - - def _on_complete(self, user): - """Callback for when a Sanction has approval and enters the ACTIVE state - - :param User user: - """ - raise NotImplementedError('Sanction subclasses must implement an #_on_complete method') - - def forcibly_reject(self): - self.state = Sanction.REJECTED - - -class TokenApprovableSanction(Sanction): - - # Tell modularodm not to attach backends 
- _meta = { - 'abstract': True, - } - - def _validate_authorizer(self, user): - """Subclasses may choose to provide extra restrictions on who can be an authorizer - - :return Boolean: True if user is allowed to be an authorizer else False - """ - return True - - def add_authorizer(self, user, node, approved=False, save=False): - """Add an admin user to this Sanction's approval state. - - :param User user: User to add. - :param Node registration: The pending registration node. - :param bool approved: Whether `user` has approved. - :param bool save: Whether to save this object. - """ - valid = self._validate_authorizer(user) - if valid and user._id not in self.approval_state: - self.approval_state[user._id] = { - 'has_approved': approved, - 'node_id': node._id, - 'approval_token': tokens.encode( - { - 'user_id': user._id, - 'sanction_id': self._id, - 'action': 'approve_{}'.format(self.SHORT_NAME) - } - ), - 'rejection_token': tokens.encode( - { - 'user_id': user._id, - 'sanction_id': self._id, - 'action': 'reject_{}'.format(self.SHORT_NAME) - } - ), - } - if save: - self.save() - return True - return False - - def remove_authorizer(self, user, save=False): - """Remove a user as an authorizer - - :param User user: - :return Boolean: True if user is removed else False - """ - if user._id not in self.approval_state: - return False - - del self.approval_state[user._id] - if save: - self.save() - return True - - def _on_approve(self, user, token): - """Callback for when a single user approves a Sanction. 
Calls #_on_complete under two conditions: - - mode is ANY and the Sanction has not already been cancelled - - mode is UNANIMOUS and all users have given approval - - :param User user: - :param str token: user's approval token - """ - if self.mode == self.ANY or all(authorizer['has_approved'] for authorizer in self.approval_state.values()): - self.state = Sanction.APPROVED - self._on_complete(user) - - def token_for_user(self, user, method): - """ - :param str method: 'approval' | 'rejection' - """ - try: - user_state = self.approval_state[user._id] - except KeyError: - raise PermissionsError(self.APPROVAL_NOT_AUTHORIZED_MESSAGE.format(DISPLAY_NAME=self.DISPLAY_NAME)) - return user_state['{0}_token'.format(method)] - - def approve(self, user, token): - """Add user to approval list if user is admin and token verifies.""" - try: - if self.approval_state[user._id]['approval_token'] != token: - raise InvalidSanctionApprovalToken(self.APPROVAL_INVALID_TOKEN_MESSAGE.format(DISPLAY_NAME=self.DISPLAY_NAME)) - except KeyError: - raise PermissionsError(self.APPROVAL_NOT_AUTHORIZED_MESSAGE.format(DISPLAY_NAME=self.DISPLAY_NAME)) - self.approval_state[user._id]['has_approved'] = True - self._on_approve(user, token) - - def reject(self, user, token): - """Cancels sanction if user is admin and token verifies.""" - try: - if self.approval_state[user._id]['rejection_token'] != token: - raise InvalidSanctionRejectionToken(self.REJECTION_INVALID_TOKEN_MESSAGE.format(DISPLAY_NAME=self.DISPLAY_NAME)) - except KeyError: - raise PermissionsError(self.REJECTION_NOT_AUTHORIZED_MESSAEGE.format(DISPLAY_NAME=self.DISPLAY_NAME)) - self.state = Sanction.REJECTED - self._on_reject(user) - - def _notify_authorizer(self, user, node): - pass - - def _notify_non_authorizer(self, user, node): - pass - - def ask(self, group): - """ - :param list group: List of (user, node) tuples containing contributors to notify about the - sanction. 
- """ - for contrib, node in group: - if contrib._id in self.approval_state: - self._notify_authorizer(contrib, node) - else: - self._notify_non_authorizer(contrib, node) - - -class EmailApprovableSanction(TokenApprovableSanction): - - # Tell modularodm not to attach backends - _meta = { - 'abstract': True, - } - - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = None - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = None - - VIEW_URL_TEMPLATE = '' - APPROVE_URL_TEMPLATE = '' - REJECT_URL_TEMPLATE = '' - - # A flag to conditionally run a callback on complete - notify_initiator_on_complete = fields.BooleanField(default=False) - # Store a persistant copy of urls for use when needed outside of a request context. - # This field gets automagically updated whenever models approval_state is modified - # and the model is saved - # { - # 'abcde': { - # 'approve': [APPROVAL_URL], - # 'reject': [REJECT_URL], - # } - # } - stashed_urls = fields.DictionaryField(default=dict) - - @staticmethod - def _format_or_empty(template, context): - if context: - return template.format(**context) - return '' - - def _view_url(self, user_id, node): - return self._format_or_empty(self.VIEW_URL_TEMPLATE, self._view_url_context(user_id, node)) - - def _view_url_context(self, user_id, node): - return None - - def _approval_url(self, user_id): - return self._format_or_empty(self.APPROVE_URL_TEMPLATE, self._approval_url_context(user_id)) - - def _approval_url_context(self, user_id): - return None - - def _rejection_url(self, user_id): - return self._format_or_empty(self.REJECT_URL_TEMPLATE, self._rejection_url_context(user_id)) - - def _rejection_url_context(self, user_id): - return None - - def _send_approval_request_email(self, user, template, context): - mails.send_mail( - user.username, - template, - user=user, - **context - ) - - def _email_template_context(self, user, node, is_authorizer=False): - return {} - - def _notify_authorizer(self, authorizer, node): - context = self._email_template_context(authorizer, 
node, is_authorizer=True) - if self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE: - self._send_approval_request_email(authorizer, self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE, context) - else: - raise NotImplementedError - - def _notify_non_authorizer(self, user, node): - context = self._email_template_context(user, node) - if self.NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE: - self._send_approval_request_email(user, self.NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE, context) - else: - raise NotImplementedError - - def add_authorizer(self, user, node, **kwargs): - super(EmailApprovableSanction, self).add_authorizer(user, node, **kwargs) - self.stashed_urls[user._id] = { - 'view': self._view_url(user._id, node), - 'approve': self._approval_url(user._id), - 'reject': self._rejection_url(user._id) - } - self.save() - - def _notify_initiator(self): - raise NotImplementedError - - def _on_complete(self, *args): - if self.notify_initiator_on_complete: - self._notify_initiator() - - -class PreregCallbackMixin(object): - - def _notify_initiator(self): - from website.project.model import DraftRegistration - - registration = self._get_registration() - prereg_schema = prereg_utils.get_prereg_schema() - - draft = DraftRegistration.find_one( - Q('registered_node', 'eq', registration) - ) - - if prereg_schema in registration.registered_schema: - mails.send_mail( - draft.initiator.username, - mails.PREREG_CHALLENGE_ACCEPTED, - user=draft.initiator, - registration_url=registration.absolute_url, - mimetype='html' - ) - - def _email_template_context(self, user, node, is_authorizer=False, urls=None): - registration = self._get_registration() - prereg_schema = prereg_utils.get_prereg_schema() - if prereg_schema in registration.registered_schema: - return { - 'custom_message': ' as part of the Preregistration Challenge (https://cos.io/prereg)' - } - else: - return {} - -class Embargo(PreregCallbackMixin, EmailApprovableSanction): - """Embargo object for registrations waiting to go public.""" - - DISPLAY_NAME = 
'Embargo' - SHORT_NAME = 'embargo' - - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_NON_ADMIN - - VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE - APPROVE_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - REJECT_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - - initiated_by = fields.ForeignField('user', backref='embargoed') - for_existing_registration = fields.BooleanField(default=False) - - @property - def is_completed(self): - return self.state == self.COMPLETED - - @property - def embargo_end_date(self): - if self.state == self.APPROVED: - return self.end_date - return False - - # NOTE(hrybacki): Old, private registrations are grandfathered and do not - # require to be made public or embargoed. This field differentiates them - # from new registrations entering into an embargo field which should not - # show up in any search related fields. - @property - def pending_registration(self): - return not self.for_existing_registration and self.is_pending_approval - - def __repr__(self): - from website.project.model import Node - - parent_registration = None - try: - parent_registration = Node.find_one(Q('embargo', 'eq', self)) - except NoResultsFound: - pass - return ('').format( - parent_registration, - self.initiated_by, - self.end_date, - self._id - ) - - def _get_registration(self): - from website.project.model import Node - - return Node.find_one(Q('embargo', 'eq', self)) - - def _view_url_context(self, user_id, node): - registration = node or self._get_registration() - return { - 'node_id': registration._id - } - - def _approval_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - approval_token = user_approval_state.get('approval_token') - if approval_token: - registration = self._get_registration() - node_id = user_approval_state.get('node_id', registration._id) - return { - 'node_id': node_id, - 
'token': approval_token, - } - - def _rejection_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - rejection_token = user_approval_state.get('rejection_token') - if rejection_token: - from website.project.model import Node - - root_registration = self._get_registration() - node_id = user_approval_state.get('node_id', root_registration._id) - registration = Node.load(node_id) - return { - 'node_id': registration.registered_from, - 'token': rejection_token, - } - - def _email_template_context(self, user, node, is_authorizer=False, urls=None): - context = super(Embargo, self)._email_template_context( - user, - node, - is_authorizer=is_authorizer - ) - urls = urls or self.stashed_urls.get(user._id, {}) - registration_link = urls.get('view', self._view_url(user._id, node)) - if is_authorizer: - approval_link = urls.get('approve', '') - disapproval_link = urls.get('reject', '') - approval_time_span = settings.EMBARGO_PENDING_TIME.days * 24 - - registration = self._get_registration() - - context.update({ - 'is_initiator': self.initiated_by == user, - 'initiated_by': self.initiated_by.fullname, - 'approval_link': approval_link, - 'project_name': registration.title, - 'disapproval_link': disapproval_link, - 'registration_link': registration_link, - 'embargo_end_date': self.end_date, - 'approval_time_span': approval_time_span, - }) - else: - context.update({ - 'initiated_by': self.initiated_by.fullname, - 'registration_link': registration_link, - 'embargo_end_date': self.end_date, - }) - return context - - def _on_reject(self, user): - from website.project.model import NodeLog - - parent_registration = self._get_registration() - parent_registration.registered_from.add_log( - action=NodeLog.EMBARGO_CANCELLED, - params={ - 'node': parent_registration.registered_from_id, - 'registration': parent_registration._id, - 'embargo_id': self._id, - }, - auth=Auth(user), - ) - # Remove backref to parent project if embargo was for a new 
registration - if not self.for_existing_registration: - parent_registration.delete_registration_tree(save=True) - parent_registration.registered_from = None - # Delete parent registration if it was created at the time the embargo was initiated - if not self.for_existing_registration: - parent_registration.is_deleted = True - parent_registration.save() - - def disapprove_embargo(self, user, token): - """Cancels retraction if user is admin and token verifies.""" - self.reject(user, token) - - def _on_complete(self, user): - from website.project.model import NodeLog - - parent_registration = self._get_registration() - if parent_registration.is_spammy: - raise NodeStateError('Cannot complete a spammy registration.') - - super(Embargo, self)._on_complete(user) - parent_registration.registered_from.add_log( - action=NodeLog.EMBARGO_APPROVED, - params={ - 'node': parent_registration.registered_from_id, - 'registration': parent_registration._id, - 'embargo_id': self._id, - }, - auth=Auth(self.initiated_by), - ) - self.save() - - def approve_embargo(self, user, token): - """Add user to approval list if user is admin and token verifies.""" - self.approve(user, token) - - def mark_as_completed(self): - self.state = Sanction.COMPLETED - self.save() - - -class Retraction(EmailApprovableSanction): - """ - Retraction object for public registrations. - Externally (specifically in user-facing language) retractions should be referred to as "Withdrawals", i.e. - "Retract Registration" -> "Withdraw Registration", "Retracted" -> "Withdrawn", etc. 
- """ - - DISPLAY_NAME = 'Retraction' - SHORT_NAME = 'retraction' - - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_NON_ADMIN - - VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE - APPROVE_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - REJECT_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - - initiated_by = fields.ForeignField('user', backref='initiated') - justification = fields.StringField(default=None, validate=MaxLengthValidator(2048)) - - def __repr__(self): - from website.project.model import Node - - parent_registration = None - try: - parent_registration = Node.find_one(Q('retraction', 'eq', self)) - except NoResultsFound: - pass - return ('').format( - parent_registration, - self.initiated_by, - self._id - ) - - def _view_url_context(self, user_id, node): - from website.project.model import Node - - registration = Node.find_one(Q('retraction', 'eq', self)) - return { - 'node_id': registration._id - } - - def _approval_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - approval_token = user_approval_state.get('approval_token') - if approval_token: - from website.project.model import Node - - root_registration = Node.find_one(Q('retraction', 'eq', self)) - node_id = user_approval_state.get('node_id', root_registration._id) - return { - 'node_id': node_id, - 'token': approval_token, - } - - def _rejection_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - rejection_token = user_approval_state.get('rejection_token') - if rejection_token: - from website.project.model import Node - - root_registration = Node.find_one(Q('retraction', 'eq', self)) - node_id = user_approval_state.get('node_id', root_registration._id) - registration = Node.load(node_id) - return { - 'node_id': registration.registered_from._id, - 'token': rejection_token, - } - - def 
_email_template_context(self, user, node, is_authorizer=False, urls=None): - urls = urls or self.stashed_urls.get(user._id, {}) - registration_link = urls.get('view', self._view_url(user._id, node)) - if is_authorizer: - from website.project.model import Node - - approval_link = urls.get('approve', '') - disapproval_link = urls.get('reject', '') - approval_time_span = settings.RETRACTION_PENDING_TIME.days * 24 - - registration = Node.find_one(Q('retraction', 'eq', self)) - - return { - 'is_initiator': self.initiated_by == user, - 'initiated_by': self.initiated_by.fullname, - 'project_name': registration.title, - 'registration_link': registration_link, - 'approval_link': approval_link, - 'disapproval_link': disapproval_link, - 'approval_time_span': approval_time_span, - } - else: - return { - 'initiated_by': self.initiated_by.fullname, - 'registration_link': registration_link, - } - - def _on_reject(self, user): - from website.project.model import Node, NodeLog - - parent_registration = Node.find_one(Q('retraction', 'eq', self)) - parent_registration.registered_from.add_log( - action=NodeLog.RETRACTION_CANCELLED, - params={ - 'node': parent_registration.registered_from_id, - 'registration': parent_registration._id, - 'retraction_id': self._id, - }, - auth=Auth(user), - save=True, - ) - - def _on_complete(self, user): - from website.project.model import Node, NodeLog - - parent_registration = Node.find_one(Q('retraction', 'eq', self)) - - parent_registration.registered_from.add_log( - action=NodeLog.RETRACTION_APPROVED, - params={ - 'node': parent_registration.registered_from_id, - 'retraction_id': self._id, - 'registration': parent_registration._id - }, - auth=Auth(self.initiated_by), - ) - # Remove any embargoes associated with the registration - if parent_registration.embargo_end_date or parent_registration.is_pending_embargo: - parent_registration.embargo.state = self.REJECTED - parent_registration.registered_from.add_log( - action=NodeLog.EMBARGO_CANCELLED, - 
params={ - 'node': parent_registration.registered_from_id, - 'registration': parent_registration._id, - 'embargo_id': parent_registration.embargo._id, - }, - auth=Auth(self.initiated_by), - ) - parent_registration.embargo.save() - # Ensure retracted registration is public - # Pass auth=None because the registration initiator may not be - # an admin on components (component admins had the opportunity - # to disapprove the retraction by this point) - for node in parent_registration.node_and_primary_descendants(): - node.set_privacy('public', auth=None, save=True, log=False) - node.update_search() - - parent_registration.date_modified = timezone.now() - parent_registration.save() - - def approve_retraction(self, user, token): - self.approve(user, token) - - def disapprove_retraction(self, user, token): - self.reject(user, token) - - -class RegistrationApproval(PreregCallbackMixin, EmailApprovableSanction): - - DISPLAY_NAME = 'Approval' - SHORT_NAME = 'registration_approval' - - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_NON_ADMIN - - VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE - APPROVE_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - REJECT_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - - initiated_by = fields.ForeignField('user', backref='registration_approved') - - def _get_registration(self): - from website.project.model import Node - - return Node.find_one(Q('registration_approval', 'eq', self)) - - def _view_url_context(self, user_id, node): - user_approval_state = self.approval_state.get(user_id, {}) - node_id = user_approval_state.get('node_id', node._id) - return { - 'node_id': node_id - } - - def _approval_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - approval_token = user_approval_state.get('approval_token') - if approval_token: - registration = self._get_registration() - 
node_id = user_approval_state.get('node_id', registration._id) - return { - 'node_id': node_id, - 'token': approval_token, - } - - def _rejection_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - rejection_token = self.approval_state.get(user_id, {}).get('rejection_token') - if rejection_token: - from website.project.model import Node - - root_registration = self._get_registration() - node_id = user_approval_state.get('node_id', root_registration._id) - registration = Node.load(node_id) - return { - 'node_id': registration.registered_from._id, - 'token': rejection_token, - } - - def _email_template_context(self, user, node, is_authorizer=False, urls=None): - context = super(RegistrationApproval, self)._email_template_context(user, node, is_authorizer, urls) - urls = urls or self.stashed_urls.get(user._id, {}) - registration_link = urls.get('view', self._view_url(user._id, node)) - if is_authorizer: - approval_link = urls.get('approve', '') - disapproval_link = urls.get('reject', '') - - approval_time_span = settings.REGISTRATION_APPROVAL_TIME.days * 24 - - registration = self._get_registration() - - context.update({ - 'is_initiator': self.initiated_by == user, - 'initiated_by': self.initiated_by.fullname, - 'registration_link': registration_link, - 'approval_link': approval_link, - 'disapproval_link': disapproval_link, - 'approval_time_span': approval_time_span, - 'project_name': registration.title, - }) - else: - context.update({ - 'initiated_by': self.initiated_by.fullname, - 'registration_link': registration_link, - }) - return context - - def _add_success_logs(self, node, user): - from website.project.model import NodeLog - - src = node.registered_from - src.add_log( - action=NodeLog.PROJECT_REGISTERED, - params={ - 'parent_node': src.parent_id, - 'node': src._primary_key, - 'registration': node._primary_key, - }, - auth=Auth(user), - save=False - ) - src.save() - - def _on_complete(self, user): - from 
website.project.model import NodeLog - register = self._get_registration() - if register.is_spammy: - raise NodeStateError('Cannot approve a a spammy registration') - super(RegistrationApproval, self)._on_complete(user) - self.state = Sanction.APPROVED - registered_from = register.registered_from - # Pass auth=None because the registration initiator may not be - # an admin on components (component admins had the opportunity - # to disapprove the registration by this point) - register.set_privacy('public', auth=None, log=False) - for child in register.get_descendants_recursive(lambda n: n.primary): - child.set_privacy('public', auth=None, log=False) - # Accounts for system actions where no `User` performs the final approval - auth = Auth(user) if user else None - registered_from.add_log( - action=NodeLog.REGISTRATION_APPROVAL_APPROVED, - params={ - 'node': registered_from._id, - 'registration': register._id, - 'registration_approval_id': self._id, - }, - auth=auth, - ) - for node in register.root.node_and_primary_descendants(): - self._add_success_logs(node, user) - node.update_search() # update search if public - - self.save() - - def _on_reject(self, user): - from website.project.model import NodeLog - - register = self._get_registration() - registered_from = register.registered_from - register.delete_registration_tree(save=True) - registered_from.add_log( - action=NodeLog.REGISTRATION_APPROVAL_CANCELLED, - params={ - 'node': registered_from._id, - 'registration': register._id, - 'registration_approval_id': self._id, - }, - auth=Auth(user), - ) - -class DraftRegistrationApproval(Sanction): - - mode = Sanction.ANY - - # Since draft registrations that require approval are not immediately registered, - # meta stores registration_choice and embargo_end_date (when applicable) - meta = fields.DictionaryField(default=dict) - - def _send_rejection_email(self, user, draft): - schema = draft.registration_schema - prereg_schema = prereg_utils.get_prereg_schema() - - if 
schema._id == prereg_schema._id: - mails.send_mail( - user.username, - mails.PREREG_CHALLENGE_REJECTED, - user=user, - draft_url=draft.absolute_url - ) - else: - raise NotImplementedError( - 'TODO: add a generic email template for registration approvals' - ) - - def approve(self, user): - if settings.PREREG_ADMIN_TAG not in user.system_tags: - raise PermissionsError('This user does not have permission to approve this draft.') - self.state = Sanction.APPROVED - self._on_complete(user) - - def reject(self, user): - if settings.PREREG_ADMIN_TAG not in user.system_tags: - raise PermissionsError('This user does not have permission to approve this draft.') - self.state = Sanction.REJECTED - self._on_reject(user) - - def _on_complete(self, user): - from website.project.model import DraftRegistration - - draft = DraftRegistration.find_one( - Q('approval', 'eq', self) - ) - auth = Auth(draft.initiator) - registration = draft.register( - auth=auth, - save=True - ) - registration_choice = self.meta['registration_choice'] - - if registration_choice == 'immediate': - sanction = functools.partial(registration.require_approval, draft.initiator) - elif registration_choice == 'embargo': - sanction = functools.partial( - registration.embargo_registration, - draft.initiator, - parse_date(self.meta.get('embargo_end_date'), ignoretz=True) - ) - else: - raise ValueError("'registration_choice' must be either 'embargo' or 'immediate'") - sanction(notify_initiator_on_complete=True) - - def _on_reject(self, user, *args, **kwargs): - from website.project.model import DraftRegistration - - # clear out previous registration options - self.meta = {} - self.save() - - draft = DraftRegistration.find_one( - Q('approval', 'eq', self) - ) - self._send_rejection_email(draft.initiator, draft) - -class EmbargoTerminationApproval(EmailApprovableSanction): - - DISPLAY_NAME = 'Embargo Termination Request' - SHORT_NAME = 'embargo_termination_approval' - - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = 
mails.PENDING_EMBARGO_TERMINATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_TERMINATION_NON_ADMIN - - VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE - APPROVE_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - REJECT_URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/?token={token}' - - embargoed_registration = fields.ForeignField('node') - - def _get_registration(self): - return self.embargoed_registration - - def _view_url_context(self, user_id, node): - registration = node or self._get_registration() - return { - 'node_id': registration._id - } - - def _approval_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - approval_token = user_approval_state.get('approval_token') - if approval_token: - registration = self._get_registration() - node_id = user_approval_state.get('node_id', registration._id) - return { - 'node_id': node_id, - 'token': approval_token, - } - - def _rejection_url_context(self, user_id): - user_approval_state = self.approval_state.get(user_id, {}) - rejection_token = user_approval_state.get('rejection_token') - if rejection_token: - root_registration = self._get_registration() - node_id = user_approval_state.get('node_id', root_registration._id) - return { - 'node_id': node_id, - 'token': rejection_token, - } - - def _email_template_context(self, user, node, is_authorizer=False, urls=None): - context = super(EmbargoTerminationApproval, self)._email_template_context( - user, - node, - is_authorizer=is_authorizer - ) - urls = urls or self.stashed_urls.get(user._id, {}) - registration_link = urls.get('view', self._view_url(user._id, node)) - if is_authorizer: - approval_link = urls.get('approve', '') - disapproval_link = urls.get('reject', '') - approval_time_span = settings.EMBARGO_TERMINATION_PENDING_TIME.days * 24 - - registration = self._get_registration() - - context.update({ - 'is_initiator': self.initiated_by == user, - 'initiated_by': 
self.initiated_by.fullname, - 'approval_link': approval_link, - 'project_name': registration.title, - 'disapproval_link': disapproval_link, - 'registration_link': registration_link, - 'embargo_end_date': self.end_date, - 'approval_time_span': approval_time_span, - }) - else: - context.update({ - 'initiated_by': self.initiated_by.fullname, - 'registration_link': registration_link, - 'embargo_end_date': self.end_date, - }) - return context - - def _on_complete(self, user=None): - super(EmbargoTerminationApproval, self)._on_complete(user) - registration = self._get_registration() - registration.terminate_embargo(Auth(user) if user else None) - - def _on_reject(self, user=None): - # Just forget this ever happened. - self.embargoed_registration.embargo_termination_approval = None diff --git a/website/project/spam/__init__.py b/website/project/spam/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/website/project/spam/model.py b/website/project/spam/model.py deleted file mode 100644 index 3442706650b0..000000000000 --- a/website/project/spam/model.py +++ /dev/null @@ -1,214 +0,0 @@ -import abc -import logging - -from django.utils import timezone -from modularodm import fields -from modularodm.exceptions import ValidationTypeError, ValidationValueError - -from framework.mongo import StoredObject -from website import settings -from website.project.model import User -from website.util import akismet -from website.util.akismet import AkismetClientError - -logger = logging.getLogger(__name__) - - -def _get_client(): - return akismet.AkismetClient( - apikey=settings.AKISMET_APIKEY, - website=settings.DOMAIN, - verify=True - ) - - -def _validate_reports(value, *args, **kwargs): - for key, val in value.iteritems(): - if not User.load(key): - raise ValidationValueError('Keys must be user IDs') - if not isinstance(val, dict): - raise ValidationTypeError('Values must be dictionaries') - if ('category' not in val or 'text' not in val or 'date' not in 
val or 'retracted' not in val): - raise ValidationValueError( - ('Values must include `date`, `category`, ', - '`text`, `retracted` keys') - ) - - -class SpamStatus(object): - UNKNOWN = None - FLAGGED = 1 - SPAM = 2 - HAM = 4 - - -class SpamMixin(StoredObject): - """Mixin to add to objects that can be marked as spam. - """ - - _meta = { - 'abstract': True - } - - # # Node fields that trigger an update to search on save - # SPAM_UPDATE_FIELDS = { - # 'spam_status', - # } - spam_status = fields.IntegerField(default=SpamStatus.UNKNOWN, index=True) - spam_pro_tip = fields.StringField(default=None) - # Data representing the original spam indication - # - author: author name - # - author_email: email of the author - # - content: data flagged - # - headers: request headers - # - Remote-Addr: ip address from request - # - User-Agent: user agent from request - # - Referer: referrer header from request (typo +1, rtd) - spam_data = fields.DictionaryField(default=dict) - date_last_reported = fields.DateTimeField(default=None, index=True) - - # Reports is a dict of reports keyed on reporting user - # Each report is a dictionary including: - # - date: date reported - # - retracted: if a report has been retracted - # - category: What type of spam does the reporter believe this is - # - text: Comment on the comment - reports = fields.DictionaryField( - default=dict, validate=_validate_reports - ) - - def flag_spam(self): - # If ham and unedited then tell user that they should read it again - if self.spam_status == SpamStatus.UNKNOWN: - self.spam_status = SpamStatus.FLAGGED - - def remove_flag(self, save=False): - if self.spam_status != SpamStatus.FLAGGED: - return - for report in self.reports.values(): - if not report.get('retracted', True): - return - self.spam_status = SpamStatus.UNKNOWN - if save: - self.save() - - @property - def is_spam(self): - return self.spam_status == SpamStatus.SPAM - - @property - def is_spammy(self): - return self.spam_status in [SpamStatus.FLAGGED, 
SpamStatus.SPAM] - - def report_abuse(self, user, save=False, **kwargs): - """Report object is spam or other abuse of OSF - - :param user: User submitting report - :param save: Save changes - :param kwargs: Should include category and message - :raises ValueError: if user is reporting self - """ - if user == self.user: - raise ValueError('User cannot report self.') - self.flag_spam() - date = timezone.now() - report = {'date': date, 'retracted': False} - report.update(kwargs) - if 'text' not in report: - report['text'] = None - self.reports[user._id] = report - self.date_last_reported = report['date'] - if save: - self.save() - - def retract_report(self, user, save=False): - """Retract last report by user - - Only marks the last report as retracted because there could be - history in how the object is edited that requires a user - to flag or retract even if object is marked as HAM. - :param user: User retracting - :param save: Save changes - """ - if user._id in self.reports: - if not self.reports[user._id]['retracted']: - self.reports[user._id]['retracted'] = True - self.remove_flag() - else: - raise ValueError('User has not reported this content') - if save: - self.save() - - def confirm_ham(self, save=False): - # not all mixins will implement check spam pre-req, only submit ham when it was incorrectly flagged - if settings.SPAM_CHECK_ENABLED and self.spam_data and self.spam_status in [SpamStatus.FLAGGED, SpamStatus.SPAM]: - client = _get_client() - client.submit_ham( - user_ip=self.spam_data['headers']['Remote-Addr'], - user_agent=self.spam_data['headers'].get('User-Agent'), - referrer=self.spam_data['headers'].get('Referer'), - comment_content=self.spam_data['content'], - comment_author=self.spam_data['author'], - comment_author_email=self.spam_data['author_email'], - ) - logger.info('confirm_ham update sent') - self.spam_status = SpamStatus.HAM - if save: - self.save() - - def confirm_spam(self, save=False): - # not all mixins will implement check spam 
pre-req, only submit spam when it was incorrectly flagged - if settings.SPAM_CHECK_ENABLED and self.spam_data and self.spam_status in [SpamStatus.UNKNOWN, SpamStatus.HAM]: - client = _get_client() - client.submit_spam( - user_ip=self.spam_data['headers']['Remote-Addr'], - user_agent=self.spam_data['headers'].get('User-Agent'), - referrer=self.spam_data['headers'].get('Referer'), - comment_content=self.spam_data['content'], - comment_author=self.spam_data['author'], - comment_author_email=self.spam_data['author_email'], - ) - logger.info('confirm_spam update sent') - self.spam_status = SpamStatus.SPAM - if save: - self.save() - - @abc.abstractmethod - def check_spam(self, user, saved_fields, request_headers, save=False): - """Must return is_spam""" - pass - - def do_check_spam(self, author, author_email, content, request_headers): - if self.spam_status == SpamStatus.HAM: - return False - if self.is_spammy: - return True - - client = _get_client() - remote_addr = request_headers['Remote-Addr'] - user_agent = request_headers.get('User-Agent') - referer = request_headers.get('Referer') - try: - is_spam, pro_tip = client.check_comment( - user_ip=remote_addr, - user_agent=user_agent, - referrer=referer, - comment_content=content, - comment_author=author, - comment_author_email=author_email - ) - except AkismetClientError: - logger.exception('Error performing SPAM check') - return False - self.spam_pro_tip = pro_tip - self.spam_data['headers'] = { - 'Remote-Addr': remote_addr, - 'User-Agent': user_agent, - 'Referer': referer, - } - self.spam_data['content'] = content - self.spam_data['author'] = author - self.spam_data['author_email'] = author_email - if is_spam: - self.flag_spam() - return is_spam diff --git a/website/project/taxonomies/__init__.py b/website/project/taxonomies/__init__.py deleted file mode 100644 index 4d79b9d9f16a..000000000000 --- a/website/project/taxonomies/__init__.py +++ /dev/null @@ -1,63 +0,0 @@ -from modularodm import fields -from 
modularodm.exceptions import ValidationValueError - -from framework.mongo import ( - ObjectId, - StoredObject, - utils as mongo_utils -) - -from website.util import api_v2_url - - -@mongo_utils.unique_on(['text']) -class Subject(StoredObject): - _id = fields.StringField(primary=True, default=lambda: str(ObjectId())) - text = fields.StringField(required=True) - parents = fields.ForeignField('subject', list=True) - children = fields.ForeignField('subject', list=True) - - @property - def absolute_api_v2_url(self): - return api_v2_url('taxonomies/{}/'.format(self._id)) - - @property - def child_count(self): - return len(self.children) - - def get_absolute_url(self): - return self.absolute_api_v2_url - - @property - def hierarchy(self): - if self.parents: - return self.parents[0].hierarchy + [self._id] - return [self._id] - -def validate_subject_hierarchy(subject_hierarchy): - validated_hierarchy, raw_hierarchy = [], set(subject_hierarchy) - for subject_id in subject_hierarchy: - subject = Subject.load(subject_id) - if not subject: - raise ValidationValueError('Subject with id <{}> could not be found.'.format(subject_id)) - - if subject.parent: - continue - - raw_hierarchy.remove(subject_id) - validated_hierarchy.append(subject._id) - - while raw_hierarchy: - if not set(subject.children.values_list('_id', flat=True)) & raw_hierarchy: - raise ValidationValueError('Invalid subject hierarchy: {}'.format(subject_hierarchy)) - else: - for child in subject.children.filter(_id__in=raw_hierarchy): - subject = child - validated_hierarchy.append(child._id) - raw_hierarchy.remove(child._id) - break - if set(validated_hierarchy) == set(subject_hierarchy): - return - else: - raise ValidationValueError('Invalid subject hierarchy: {}'.format(subject_hierarchy)) - raise ValidationValueError('Unable to find root subject in {}'.format(subject_hierarchy)) diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index d4285cc15681..576156de3a2d 100644 --- 
a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -7,7 +7,6 @@ from framework import forms, status from framework.auth import cas -from framework.auth import User from framework.auth.core import get_user, generate_verification_key from framework.auth.decorators import block_bing_preview, collect_auth, must_be_logged_in from framework.auth.forms import PasswordForm, SetEmailAndPasswordForm @@ -17,8 +16,8 @@ from framework.flask import redirect # VOL-aware redirect from framework.sessions import session from framework.transactions.handlers import no_auto_transaction +from osf.models import AbstractNode as Node, OSFUser as User, PreprintService from website import mails, language, settings -from website.models import Node, PreprintService from website.notifications.utils import check_if_all_global_subscriptions_are_none from website.profile import utils as profile_utils from website.project.decorators import (must_have_permission, must_be_valid_project, must_not_be_registration, diff --git a/website/project/views/drafts.py b/website/project/views/drafts.py index c49e3576cf5f..913516e35de2 100644 --- a/website/project/views/drafts.py +++ b/website/project/views/drafts.py @@ -24,10 +24,9 @@ http_error_if_disk_saving_mode ) from website import language, settings -from website.models import NodeLog from website.prereg import utils as prereg_utils from website.project import utils as project_utils -from osf.models import MetaSchema, DraftRegistration, Sanction +from osf.models import NodeLog, MetaSchema, DraftRegistration, Sanction from website.project.metadata.schemas import ACTIVE_META_SCHEMAS from website.project.metadata.utils import serialize_meta_schema, serialize_draft_registration from website.project.utils import serialize_node diff --git a/website/project/views/node.py b/website/project/views/node.py index c2c6ae4865b2..2f8a27443726 100644 --- a/website/project/views/node.py +++ b/website/project/views/node.py @@ -36,12 +36,12 
@@ from website.project.model import has_anonymous_link, NodeUpdateError, validate_title from website.project.forms import NewNodeForm from website.project.metadata.utils import serialize_meta_schemas -from website.models import Node, WatchConfig, PrivateLink, Comment +from osf.models import AbstractNode as Node, PrivateLink, Comment +from osf.models.licenses import serialize_node_license_record from website import settings from website.views import find_bookmark_collection, validate_page_num from website.views import serialize_node_summary from website.profile import utils -from website.project.licenses import serialize_node_license_record from website.util.sanitize import strip_html from website.util import rapply @@ -460,75 +460,6 @@ def project_set_privacy(auth, node, **kwargs): 'permissions': permissions, } - -@must_be_valid_project -@must_be_contributor_or_public -@must_not_be_registration -def watch_post(auth, node, **kwargs): - user = auth.user - watch_config = WatchConfig(node=node, - digest=request.json.get('digest', False), - immediate=request.json.get('immediate', False)) - try: - user.watch(watch_config) - except ValueError: # Node is already being watched - raise HTTPError(http.BAD_REQUEST) - - user.save() - - return { - 'status': 'success', - 'watchCount': node.watches.count() - } - - -@must_be_valid_project -@must_be_contributor_or_public -@must_not_be_registration -def unwatch_post(auth, node, **kwargs): - user = auth.user - watch_config = WatchConfig(node=node, - digest=request.json.get('digest', False), - immediate=request.json.get('immediate', False)) - try: - user.unwatch(watch_config) - except ValueError: # Node isn't being watched - raise HTTPError(http.BAD_REQUEST) - - return { - 'status': 'success', - 'watchCount': node.watches.count() - } - - -@must_be_valid_project -@must_be_contributor_or_public -@must_not_be_registration -def togglewatch_post(auth, node, **kwargs): - '''View for toggling watch mode for a node.''' - # TODO: refactor 
this, watch_post, unwatch_post (@mambocab) - user = auth.user - watch_config = WatchConfig( - node=node, - digest=request.json.get('digest', False), - immediate=request.json.get('immediate', False) - ) - try: - if user.is_watching(node): - user.unwatch(watch_config) - else: - user.watch(watch_config) - except ValueError: - raise HTTPError(http.BAD_REQUEST) - - user.save() - - return { - 'status': 'success', - 'watchCount': node.watches.count(), - 'watched': user.is_watching(node) - } - @must_be_valid_project @must_not_be_registration @must_have_permission(WRITE) diff --git a/website/project/views/register.py b/website/project/views/register.py index 0c0886b1ff7f..46367b4a718e 100644 --- a/website/project/views/register.py +++ b/website/project/views/register.py @@ -21,11 +21,10 @@ must_have_permission, must_not_be_registration, must_be_registration, ) -from website.identifiers.model import Identifier +from osf.models import Identifier, MetaSchema, NodeLog from website.identifiers.metadata import datacite_metadata_for_node from website.project.utils import serialize_node from website.util.permissions import ADMIN -from website.models import MetaSchema, NodeLog from website import language from website.project import signals as project_signals from website.project.metadata.schemas import _id_to_name diff --git a/website/project/views/tag.py b/website/project/views/tag.py index df4f3d96e9a2..1c23e9c3f348 100644 --- a/website/project/views/tag.py +++ b/website/project/views/tag.py @@ -5,8 +5,8 @@ from modularodm.exceptions import ValidationError from framework.auth.decorators import collect_auth +from osf.models import AbstractNode as Node, Tag from website.exceptions import InvalidTagError, NodeStateError, TagNotFoundError -from website.project.model import Node, Tag from website.project.decorators import ( must_be_valid_project, must_have_permission, must_not_be_registration ) diff --git a/website/routes.py b/website/routes.py index 6a27fea7ec6e..c228a7b443cc 100644 
--- a/website/routes.py +++ b/website/routes.py @@ -25,6 +25,7 @@ from modularodm import Q from modularodm.exceptions import QueryException, NoResultsFound +from osf.models import Institution from website import util from website import prereg from website import settings @@ -33,7 +34,6 @@ from website.util import paths from website.util import sanitize from website import maintenance -from website.models import Institution from website import landing_pages as landing_page_views from website import views as website_views from website.citations import views as citation_views @@ -1561,22 +1561,6 @@ def make_url_map(app): '/project//node//permissions/beforepublic/', ], 'get', project_views.node.project_before_set_public, json_renderer), - ### Watching ### - Rule([ - '/project//watch/', - '/project//node//watch/' - ], 'post', project_views.node.watch_post, json_renderer), - - Rule([ - '/project//unwatch/', - '/project//node//unwatch/' - ], 'post', project_views.node.unwatch_post, json_renderer), - - Rule([ - '/project//togglewatch/', - '/project//node//togglewatch/' - ], 'post', project_views.node.togglewatch_post, json_renderer), - # Combined files Rule( [ diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py index 1bf4338994b5..915c1e794a36 100644 --- a/website/search/elastic_search.py +++ b/website/search/elastic_search.py @@ -26,7 +26,7 @@ from osf.models import Institution from website import settings from website.filters import gravatar -from website.project.licenses import serialize_node_license_record +from osf.models.licenses import serialize_node_license_record from website.search import exceptions from website.search.util import build_query, clean_splitters from website.util import sanitize diff --git a/website/search_migration/migrate.py b/website/search_migration/migrate.py index 05e38e756b6a..4b86cef1a2d3 100644 --- a/website/search_migration/migrate.py +++ b/website/search_migration/migrate.py @@ -10,13 +10,11 @@ from 
modularodm.query.querydialect import DefaultQueryDialect as Q import website.search.search as search -from framework.auth import User from framework.mongo.utils import paginated from scripts import utils as script_utils +from osf.models import OSFUser as User, Institution, AbstractNode as Node from website import settings from website.app import init_app -from website.institutions.model import Institution -from website.models import Node from website.search.elastic_search import client as es_client from website.search.search import update_institution diff --git a/website/templates/emails/comments.html.mako b/website/templates/emails/comments.html.mako index 1691070b5097..ddd1f3455d44 100644 --- a/website/templates/emails/comments.html.mako +++ b/website/templates/emails/comments.html.mako @@ -1,4 +1,4 @@ -<% from website.models import User %> +<% from osf.models import OSFUser as User %> @@ -16,4 +16,4 @@ -
\ No newline at end of file + diff --git a/website/templates/emails/digest.txt.mako b/website/templates/emails/digest.txt.mako index ce08952d56e4..515d82325c7f 100644 --- a/website/templates/emails/digest.txt.mako +++ b/website/templates/emails/digest.txt.mako @@ -1,9 +1,9 @@ -<% from website.models import Node %> +<% from osf.models import AbstractNode%> <% def print_message(d, indent=0): message = '' for key in d['children']: - message += '\t' * indent + ' - ' + Node.load(key).title + ':' + message += '\t' * indent + ' - ' + AbstractNode.load(key).title + ':' if d['children'][key]['messages']: for m in d['children'][key]['messages']: message += '\n' +'\t' * (indent+1) + ' - '+ m @@ -22,7 +22,7 @@ From the Open Science Framework <%def name="build_message(d, indent=0)"> %for key in d['children']: - ${'\t' * indent + Node.load(key).title + ':'} + ${'\t' * indent + AbstractNode.load(key).title + ':'} %if d['children'][key]['messages']: %for m in d['children'][key]['messages']: ${'\t' * indent + '- ' + m['message'] + ' ' + m['timestamp'].strftime("%H:%M")} diff --git a/website/templates/emails/transactional.html.mako b/website/templates/emails/transactional.html.mako index 481cd07f0d27..5a66b4c9a34b 100644 --- a/website/templates/emails/transactional.html.mako +++ b/website/templates/emails/transactional.html.mako @@ -12,9 +12,9 @@

${node_title} - <% from website.project.model import Node %> - %if Node.load(node_id).parent_node: - in ${Node.load(node_id).parent_node.title} + <% from osf.models import AbstractNode %> + %if AbstractNode.load(node_id).parent_node: + in ${AbstractNode.load(node_id).parent_node.title} %endif

diff --git a/website/tokens/handlers.py b/website/tokens/handlers.py index 4e1a9b1ab0d7..84db951730cd 100644 --- a/website/tokens/handlers.py +++ b/website/tokens/handlers.py @@ -45,8 +45,8 @@ def retraction_handler(action, registration, registered_from): @must_be_logged_in def sanction_handler(kind, action, payload, encoded_token, auth, **kwargs): - from website.models import ( - Node, + from osf.models import ( + AbstractNode, Embargo, EmbargoTerminationApproval, RegistrationApproval, @@ -85,7 +85,7 @@ def sanction_handler(kind, action, payload, encoded_token, auth, **kwargs): do_action = getattr(sanction, action, None) if do_action: - registration = Node.find_one(Q(sanction.SHORT_NAME, 'eq', sanction)) + registration = AbstractNode.find_one(Q(sanction.SHORT_NAME, 'eq', sanction)) registered_from = registration.registered_from try: do_action(auth.user, encoded_token) diff --git a/website/util/__init__.py b/website/util/__init__.py index a4f1f2342338..cb6ce81bfb57 100644 --- a/website/util/__init__.py +++ b/website/util/__init__.py @@ -17,9 +17,6 @@ from modularodm import Q from modularodm.exceptions import NoResultsFound -# Keep me: Makes rubeus importable from website.util -from . 
import rubeus # noqa - logger = logging.getLogger(__name__) @@ -211,7 +208,7 @@ def disconnected_from_listeners(signal): def check_private_key_for_anonymized_link(private_key): - from website.project.model import PrivateLink + from osf.models import PrivateLink is_anonymous = False if private_key is not None: diff --git a/website/views.py b/website/views.py index d379ba8d2cb6..372804625723 100644 --- a/website/views.py +++ b/website/views.py @@ -21,9 +21,7 @@ from framework.auth.core import get_current_user_id from website.institutions.views import serialize_institution -from osf.models import BaseFileNode -from website.models import Guid -from website.models import Institution, PreprintService +from osf.models import BaseFileNode, Guid, Institution, PreprintService from website.settings import EXTERNAL_EMBER_APPS, INSTITUTION_DISPLAY_NODE_THRESHOLD from website.project.model import has_anonymous_link from website.util import permissions