Ci #940 (Merged)

39 changes: 7 additions & 32 deletions .github/workflows/main.yml
@@ -46,10 +46,14 @@ jobs:
pip install coverage coveralls pyarrow pandas # for one_cache tests
env:
PIP_USE_MIRRORS: true

- name: Ruff
run: |
ruff check ./alyx
- name: Run tests
run: |
sudo touch /var/log/alyx.log; sudo chmod 666 /var/log/alyx.log
sudo mkdir /var/log/alyx
sudo touch /var/log/alyx/django.log
sudo chmod 666 /var/log/alyx/django.log
cd alyx
cp ./alyx/environment_template.env ./alyx/.env
cp ../deploy/docker/settings-deploy.py alyx/settings.py
@@ -61,37 +65,8 @@ jobs:
DJANGO_SETTINGS_MODULE: alyx.settings
PYTHONPATH: $HOME/builds/cortexlab/alyx
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
APACHE_LOG_DIR: /var/log/alyx.log
APACHE_LOG_DIR: /var/log/alyx
POSTGRES_HOST: localhost
- name: Flake
run: |
cd alyx
flake8 .

- name: Generate new requirements_frozen.txt if needed
# Only runs when branch pushed to directly OR when a PR is merged
if: ${{ github.event_name == 'push' }}
run: |
pip freeze > requirements_frozen_temp.txt
if diff requirements_frozen.txt requirements_frozen_temp.txt > /dev/null; then
echo "requirements_frozen.txt unchanged"
rm requirements_frozen_temp.txt
echo "GIT_PUSH_NEEDED=false" >> "$GITHUB_ENV"
else
echo "requirements_frozen.txt is different, git push needed"
mv requirements_frozen_temp.txt requirements_frozen.txt
echo "GIT_PUSH_NEEDED=true" >> "$GITHUB_ENV"
fi

- name: Setup git/commit/push for requirements_frozen.txt if needed
# Only runs when requirements_frozen.txt is updated
if: env.GIT_PUSH_NEEDED == 'true'
run: |
git config user.name github-actions
git config user.email github-actions@github.com
git add requirements_frozen.txt
git commit -m "GitHub Actions generated requirements_frozen.txt"
git push

# Docker steps only run when master branch pushed to directly OR when a PR is merged
- name: Set Docker conditional value if needed
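
Note on the workflow changes above: the Flake8 step and the self-committing requirements_frozen.txt steps are removed, linting now runs through `ruff check ./alyx`, and the test step provisions a log directory (`/var/log/alyx/django.log`) instead of the old `/var/log/alyx.log` file, with `APACHE_LOG_DIR` pointing at the directory. The settings-deploy.py that consumes `APACHE_LOG_DIR` is not part of this diff, so the snippet below is only a hedged sketch of what such a Django LOGGING section could look like; the handler names and level are illustrative, not taken from the repository.

```python
import os

# Assumption: APACHE_LOG_DIR is exported by the workflow step above (e.g. /var/log/alyx).
LOG_DIR = os.environ.get('APACHE_LOG_DIR', '/var/log/alyx')

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'file': {
            'class': 'logging.FileHandler',
            # Matches the django.log file created in the "Run tests" step.
            'filename': os.path.join(LOG_DIR, 'django.log'),
            'level': 'INFO',
        },
    },
    'root': {'handlers': ['file'], 'level': 'INFO'},
}
```
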
3 changes: 3 additions & 0 deletions .gitignore
@@ -12,6 +12,9 @@ alyx/alyx/settings.py
alyx/static/*/*
scripts/deployment_examples/docker-apache/settings*

# package if installed
alyx/alyx.egg-info

# other
*.pyc
.DS_Store
8 changes: 2 additions & 6 deletions .readthedocs.yaml
@@ -7,13 +7,9 @@ version: 2

# Set the version of Python and other tools you might need
build:
os: ubuntu-20.04
os: ubuntu-24.04
tools:
python: "3.9"
# You can also specify other tool versions:
# nodejs: "16"
# rust: "1.55"
# golang: "1.17"
python: "3.12"

# Build documentation in the docs/ directory with Sphinx
sphinx:
4 changes: 2 additions & 2 deletions alyx/actions/admin.py
@@ -1,6 +1,6 @@
import base64
import json
import structlog
import logging

from django import forms
from django.conf import settings
@@ -27,7 +27,7 @@
from experiments.models import ProbeInsertion, FOV
from jobs.models import Task

logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)


# Filters
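
The structlog-to-logging swap seen here repeats in every module below: the third-party `structlog.get_logger(__name__)` call is replaced by the standard library's `logging.getLogger(__name__)`, and the rest of each module is untouched. A minimal sketch of the resulting pattern; the function and the `basicConfig` call are illustrative only, since in Alyx the handlers come from Django's LOGGING setting:

```python
import logging

# One logger per module, named after the module itself (e.g. "actions.admin").
logger = logging.getLogger(__name__)


def touch_water_restriction(subject):
    # %-style lazy formatting: the message is only built if a handler emits it.
    logger.info("Water restriction updated for %s", subject)


if __name__ == "__main__":
    # Illustrative configuration for running the sketch standalone.
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s %(name)s %(levelname)s %(message)s")
    touch_water_restriction("example_subject")
```
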
4 changes: 2 additions & 2 deletions alyx/actions/models.py
@@ -1,7 +1,7 @@
from datetime import timedelta
from math import inf

import structlog
import logging
from one.alf.spec import QC

from django.conf import settings
@@ -13,7 +13,7 @@
from misc.models import Lab, LabLocation, LabMember, Note


logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)


def _default_water_type():
4 changes: 2 additions & 2 deletions alyx/actions/notifications.py
@@ -1,12 +1,12 @@
import structlog
import logging
from textwrap import dedent

from django.utils import timezone

from actions.models import create_notification


logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)


def responsible_user_changed(subject, old_user, new_user):
4 changes: 2 additions & 2 deletions alyx/actions/tests.py
@@ -219,8 +219,8 @@ def test_notif_water_1(self):
def test_notif_water_2(self):
# If the last water admin was on June 3 at 12pm, the notification
# should be created after June 4 at 11am.
l = ((9, False), (10, False), (11, True), (12, True))
for (h, r) in l:
teupeul = ((9, False), (10, False), (11, True), (12, True))
for (h, r) in teupeul:
date = timezone.datetime(2018, 6, 4, h, 0, 0)
check_water_administration(self.subject, date=date)
notif = Notification.objects.last()
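
The `l` → `teupeul` rename above, and the similar renames in the files that follow, presumably address the ambiguous-variable-name rule E741, which Ruff enforces: `l`, `I` and `O` are too easily confused with `1` and `0`. The replacement names come from the PR itself; the fragment below only illustrates the rule with an invented name:

```python
# Rejected by Ruff/pycodestyle (E741): ambiguous variable name "l".
# l = ((9, False), (10, False), (11, True), (12, True))

# Any descriptive name passes; "expected" is an invented example.
expected = ((9, False), (10, False), (11, True), (12, True))
for hour, notified in expected:
    print(hour, notified)
```
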
12 changes: 6 additions & 6 deletions alyx/actions/tests_rest.py
@@ -548,15 +548,15 @@ def test_list_retrieve_water_restrictions(self):
def test_list_retrieve_lab_locations(self):
# test list
url = reverse("location-list")
l = self.ar(self.client.get(url))
self.assertTrue(len(l) > 0)
self.assertEqual(set(l[0].keys()), {"name", "json", "lab"})
reponse = self.ar(self.client.get(url))
self.assertTrue(len(reponse) > 0)
self.assertEqual(set(reponse[0].keys()), {"name", "json", "lab"})
# test detail
url = reverse("location-detail", args=[l[0]["name"]])
url = reverse("location-detail", args=[reponse[0]["name"]])
d = self.ar(self.client.get(url))
self.assertEqual(d, l[0])
self.assertEqual(d, reponse[0])
# test patch
url = reverse("location-detail", args=[l[0]["name"]])
url = reverse("location-detail", args=[reponse[0]["name"]])
json_dict = {
"string": "look at me! I'm a Json field",
"integer": 15,
4 changes: 2 additions & 2 deletions alyx/actions/water_control.py
@@ -4,7 +4,7 @@
from dateutil.rrule import HOURLY
import functools
import io
import structlog
import logging
from operator import attrgetter, itemgetter
import os.path as op

@@ -17,7 +17,7 @@
import numpy as np


logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)


PALETTE = {
8 changes: 4 additions & 4 deletions alyx/alyx/base.py
@@ -1,5 +1,5 @@
import json
import structlog
import logging
import os
import os.path as op
from polymorphic.models import PolymorphicModel
@@ -30,7 +30,7 @@
from reversion.admin import VersionAdmin
from alyx import __version__ as version

logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)

DATA_DIR = op.abspath(op.join(op.dirname(__file__), '../../data'))
DISABLE_MAIL = False # used for testing
@@ -211,8 +211,8 @@ def __init__(self, *args, **kwargs):
self.__dict__ = self


def flatten(l):
return [item for sublist in l for item in sublist]
def flatten(liste):
return [item for sublist in liste for item in sublist]


def _show_change(date_time, old, new):
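
`flatten` keeps its one-level list-of-lists behaviour; only the parameter name changes to satisfy E741. As a design note (not a change made by this PR), the same helper can be spelled with the standard library:

```python
from itertools import chain


def flatten(liste):
    """Flatten one level of nesting, as in alyx/alyx/base.py."""
    return [item for sublist in liste for item in sublist]


# Equivalent standard-library spelling:
assert flatten([[1, 2], [3], []]) == list(chain.from_iterable([[1, 2], [3], []]))
```
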
6 changes: 3 additions & 3 deletions alyx/data/management/commands/files.py
@@ -26,9 +26,9 @@ def _create_missing_file_records_main_globus(dry_run=False, lab=None):
labs = Lab.objects.filter(name=lab)
else:
labs = Lab.objects.all()
for l in labs:
repos = l.repositories.filter(globus_is_personal=False)
dsets = Dataset.objects.filter(session__lab=l)
for lab in labs:
repos = lab.repositories.filter(globus_is_personal=False)
dsets = Dataset.objects.filter(session__lab=lab)
for r in repos:
dsr = dsets.annotate(rep_count=Count('file_records',
filter=Q(file_records__data_repository=r)))
2 changes: 1 addition & 1 deletion alyx/data/management/commands/transfers_integration.py
@@ -289,7 +289,7 @@ def test_transfers(self):
ls_local = self.gtc.operation_ls(self.local_endpoint_id,
path=str(Path(exp_files[0]).parent))
ls_files = [ls['name'] for ls in ls_local['DATA']]
assert not Path(exp_files[0]).name in ls_files
assert Path(exp_files[0]).name not in ls_files

dsets_to_del = Dataset.objects.filter(session__lab__name=self.lab_name,
name='spikes.times.npy')
4 changes: 2 additions & 2 deletions alyx/data/models.py
@@ -1,4 +1,4 @@
import structlog
import logging
from one.alf.spec import QC

from django.core.validators import RegexValidator
@@ -11,7 +11,7 @@
from actions.models import Session
from alyx.base import BaseModel, modify_fields, BaseManager, CharNullField, BaseQuerySet, ALF_SPEC

logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)


def _related_string(field):
4 changes: 2 additions & 2 deletions alyx/data/transfers.py
@@ -1,5 +1,5 @@
import json
import structlog
import logging
import os
import os.path as op
import re
@@ -18,7 +18,7 @@
from rest_framework.response import Response
from actions.models import Session

logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)

# Login
# ------------------------------------------------------------------------------------------------
4 changes: 2 additions & 2 deletions alyx/data/views.py
@@ -1,4 +1,4 @@
import structlog
import logging
import re

from django.contrib.auth import get_user_model
@@ -37,7 +37,7 @@
_create_dataset_file_records, bulk_sync, _check_dataset_protected,
_get_name_collection_revision)

logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)

# DataRepositoryType
# ------------------------------------------------------------------------------------------------
4 changes: 2 additions & 2 deletions alyx/experiments/models.py
@@ -1,4 +1,4 @@
import structlog
import logging
import uuid

from django.db import models
@@ -13,7 +13,7 @@
from alyx.base import BaseModel, BaseManager
from actions.models import ChronicRecording

logger = structlog.get_logger(__name__)
logger = logging.getLogger(__name__)

X_HELP_TEXT = ("brain surface medio-lateral coordinate (um) of"
"the insertion, right +, relative to Bregma")
20 changes: 10 additions & 10 deletions alyx/misc/management/commands/download_gsheets.py
@@ -42,8 +42,8 @@ def pad(s):
return re.sub(r'\_([0-9]+)$', lambda m: '_%04d' % int(m.group(1)), s)


def flatten(l):
return [item for sublist in l for item in sublist]
def flatten(line):
return [item for sublist in line for item in sublist]


def get_username(initials):
@@ -72,8 +72,8 @@ def parse(date_str, time=False):
return ''
try:
ret = parse_(date_str)
except:
logger.warn("Could not parse date %s.", date_str)
except Exception:
logger.warning("Could not parse date %s.", date_str)
return ''
if not time:
return ret.strftime("%Y-%m-%d")
@@ -120,11 +120,11 @@ def sheet_to_table(wks, header_line=0, first_line=2):
table = []
headers = rows[header_line]
for row in rows[first_line:]:
l = {headers[i].strip(): row[i].strip() for i in range(len(headers))}
line = {headers[i].strip(): row[i].strip() for i in range(len(headers))}
# Empty line = end of table.
if all(_ == '' for _ in l.values()):
if all(_ == '' for _ in line.values()):
break
table.append(Bunch(l))
table.append(Bunch(line))
return table


@@ -366,9 +366,9 @@ def _get_subjects(self, line_tables):
def _get_line(self, line):
out = self.lines.get(line, None)
if not out:
for l in self.lines.values():
if l.auto_name == line:
return l
for ln in self.lines.values():
if ln.auto_name == line:
return ln
return out

def _get_litters(self, subjects):
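
Two more lint fixes show up in `parse` above: the bare `except:` becomes `except Exception:` (rule E722; a bare except also swallows KeyboardInterrupt and SystemExit), and the long-deprecated `logger.warn` alias becomes `logger.warning`. A minimal sketch of the corrected shape; the function below is illustrative, not the repository's actual `parse`:

```python
import logging

from dateutil.parser import parse as parse_

logger = logging.getLogger(__name__)


def parse_date(date_str):
    """Return an ISO date string, or '' when date_str cannot be parsed."""
    try:
        ret = parse_(date_str)
    except Exception:  # E722: never use a bare "except:"
        logger.warning("Could not parse date %s.", date_str)  # .warn() is deprecated
        return ''
    return ret.strftime("%Y-%m-%d")
```
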
18 changes: 9 additions & 9 deletions alyx/misc/management/commands/report.py
@@ -18,28 +18,28 @@
logger = logging.getLogger(__name__)


def _repr_log_entry(l):
if l.is_addition():
def _repr_log_entry(log):
if log.is_addition():
action = 'Added'
elif l.is_change():
elif log.is_change():
action = 'Changed'
elif l.is_deletion():
elif log.is_deletion():
action = 'Deleted'
changed = json.loads(l.change_message or '[]')
changed = json.loads(log.change_message or '[]')
if changed and changed[0].get('changed', {}):
changed = ('(%s)' %
(', '.join(changed[0].get('changed', {}).get('fields', {}))))
else:
changed = ''
s = '%02d:%02d - %s <%s> %s' % (
l.action_time.hour,
l.action_time.minute,
log.action_time.hour,
log.action_time.minute,
action,
# l.content_type,

# NOTE: use this when debugging repr (the repr string is directly saved in the LogEntry)
# str(l.get_edited_object()),
l.object_repr,
log.object_repr,

changed,
)
@@ -206,7 +206,7 @@ def make_past_changes(self, user):
logs = LogEntry.objects.filter(user=user,
action_time__date=yesterday,
).order_by('action_time')
return 'Your actions yesterday:\n\n' + '\n'.join('* ' + _repr_log_entry(l) for l in logs)
return 'Your actions yesterday:\n\n' + '\n'.join('* ' + _repr_log_entry(log) for log in logs)

def make_training(self, user):
"""Send training report to the specified user."""