21 changes: 16 additions & 5 deletions .github/workflows/ci.yml
@@ -846,24 +846,35 @@ jobs:
run: |
echo "Listing downloaded files:"
find reports-download -type f
cp reports-download/.coverage* . 2>/dev/null || echo "No coverage files found"
coverage combine
coverage report
cp reports-download/.coverage* $GITHUB_WORKSPACE/ 2>/dev/null || echo "No coverage files found"
cd $GITHUB_WORKSPACE
coverage combine || echo "::warning::No coverage data to combine"
coverage report || echo "::warning::No coverage report generated"
ls -la .coverage* || true

- name: Verify .coverage file exists
run: |
if [ ! -f $GITHUB_WORKSPACE/.coverage ]; then
echo "::error::No .coverage file found after combining coverage!"
exit 1
fi

- name: Upload combined coverage report
uses: actions/upload-artifact@v4
with:
name: backend-coverage-report
path: .coverage
path: ${{ github.workspace }}/.coverage
include-hidden-files: true
retention-days: 30
overwrite: true

- name: Comment coverage report on PR
uses: py-cov-action/python-coverage-comment-action@v3
if: github.event_name == 'pull_request'
uses: py-cov-action/python-coverage-comment-action@v3.38
with:
GITHUB_TOKEN: ${{ github.token }}
MERGE_COVERAGE_FILES: false
COVERAGE_PATH: ${{ github.workspace }}

# ==========================================================================
# Build and publish stage - builds production grade images and publishes
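The combine and report commands above go through the coverage CLI; as a rough sketch of the equivalent flow through the coverage.py API (the data file names and working directory are assumptions, not taken from this workflow):

```python
# Minimal sketch of the combine -> report flow the workflow runs via the CLI.
# Assumes parallel data files such as .coverage.gw0 / .coverage.gw1 sit in the
# current working directory (the names here are illustrative only).
from coverage import Coverage

cov = Coverage()
cov.combine()   # merge the .coverage.* fragments into one data set
cov.save()      # write the single .coverage file that later steps verify and upload
cov.report()    # print the textual summary, equivalent to `coverage report`
```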
17 changes: 9 additions & 8 deletions backend/.flake8
@@ -2,12 +2,12 @@
extend-ignore = E203, W503, F541, E501
max-doc-length = 88
per-file-ignores =
tests/*: F841
../premium/backend/tests/*: F841
../enterprise/backend/tests/*: F841
src/baserow/contrib/database/migrations/*: X1
src/baserow/core/migrations/*: X1
src/baserow/core/psycopg.py: BRP001
tests/*: F841, BAI001
../premium/backend/tests/*: F841, BAI001
../enterprise/backend/tests/*: F841, BAI001
src/baserow/contrib/database/migrations/*: BDC001
src/baserow/core/migrations/*: BDC001
src/baserow/core/psycopg.py: BPG001
exclude =
.git,
__pycache__,
@@ -16,6 +16,7 @@ exclude =

[flake8:local-plugins]
extension =
X1 = flake8_baserow:DocstringPlugin
BRP001 = flake8_baserow:BaserowPsycopgChecker
BDC001 = flake8_baserow:DocstringPlugin
BPG001 = flake8_baserow:BaserowPsycopgChecker
BAI001 = flake8_baserow:BaserowAIImportsChecker
paths = ./flake8_plugins
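Worth noting: with the per-file-ignores above, a module-level AI import is tolerated in test modules but reported in application code. A hypothetical illustration (both file paths are made up):

```python
# tests/test_generative_ai.py (hypothetical) -- BAI001 is ignored for tests/*,
# so a module-level import is tolerated here:
import litellm

# src/baserow/contrib/database/ai_handler.py (hypothetical) -- the same
# module-level import would be reported as BAI001 and has to move inside the
# function or method that needs it:
import dspy
```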
2 changes: 1 addition & 1 deletion backend/flake8_plugins/__init__.py
@@ -1 +1 @@
from .flake8_baserow import DocstringPlugin, BaserowPsycopgChecker
from .flake8_baserow import DocstringPlugin, BaserowPsycopgChecker, BaserowAIImportsChecker
3 changes: 2 additions & 1 deletion backend/flake8_plugins/flake8_baserow/__init__.py
@@ -1,4 +1,5 @@
from .docstring import Plugin as DocstringPlugin
from .psycopg import BaserowPsycopgChecker
from .ai_imports import BaserowAIImportsChecker

__all__ = ["DocstringPlugin", "BaserowPsycopgChecker"]
__all__ = ["DocstringPlugin", "BaserowPsycopgChecker", "BaserowAIImportsChecker"]
86 changes: 86 additions & 0 deletions backend/flake8_plugins/flake8_baserow/ai_imports.py
@@ -0,0 +1,86 @@
import ast
from typing import Any, Iterator, Optional, Tuple


class BaserowAIImportsChecker:
"""
Flake8 plugin to ensure dspy and litellm are only imported locally within
functions/methods, not at module level.
"""

name = "flake8-baserow-ai-imports"
version = "0.1.0"

def __init__(self, tree: ast.AST, filename: str):
self.tree = tree
self.filename = filename

def run(self) -> Iterator[Tuple[int, int, str, Any]]:
"""Check for global imports of dspy and litellm."""
for node in ast.walk(self.tree):
# Check if this is a module-level import (not inside a function/method)
if self._is_global_import(node):
if isinstance(node, ast.Import):
for alias in node.names:
if self._is_ai_module(alias.name):
yield (
node.lineno,
node.col_offset,
f"BAI001 {alias.name} must be imported locally within functions/methods, not globally",
type(self),
)
elif isinstance(node, ast.ImportFrom):
if node.module and self._is_ai_module(node.module):
yield (
node.lineno,
node.col_offset,
f"BAI001 {node.module} must be imported locally within functions/methods, not globally",
type(self),
)

def _is_ai_module(self, module_name: str) -> bool:
"""Check if the module is dspy or litellm (including submodules)."""
if not module_name:
return False
return (
module_name == "dspy"
or module_name.startswith("dspy.")
or module_name == "litellm"
or module_name.startswith("litellm.")
)

def _is_global_import(self, node: ast.AST) -> bool:
"""
Check if an import node is at global scope.
Returns True if the import is not nested inside a function or method.
"""
if not isinstance(node, (ast.Import, ast.ImportFrom)):
return False

# ast.walk does not expose parent references, so re-traverse the tree from the
# root and track whether the path to this import passes through a function or
# method.
return self._check_node_is_global(self.tree, node) is True

def _check_node_is_global(
self, root: ast.AST, target: ast.AST, in_function: bool = False
) -> Optional[bool]:
"""
Recursively check if target node is at global scope.
Returns True if the target is found at global scope, False if it is found inside a function, and None if it is not found under this root.
"""
if root is target:
return not in_function

# Check if we're entering a function/method
new_in_function = in_function or isinstance(
root, (ast.FunctionDef, ast.AsyncFunctionDef)
)

# Recursively check all child nodes
for child in ast.iter_child_nodes(root):
result = self._check_node_is_global(child, target, new_in_function)
if result is not None:
return result

return None
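For reviewers who want to poke at the new checker directly, a small standalone driver (assuming it is run from the `backend/` directory so the local `flake8_plugins` package is importable):

```python
import ast

from flake8_plugins.flake8_baserow import BaserowAIImportsChecker

source = """
import dspy  # module-level import of an AI library

def call_llm():
    import litellm  # local import inside a function
"""

tree = ast.parse(source)
checker = BaserowAIImportsChecker(tree, "example.py")
for lineno, col, message, _ in checker.run():
    print(f"{lineno}:{col} {message}")
# Only the module-level `import dspy` is reported as BAI001;
# the local `import litellm` inside call_llm() passes.
```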
2 changes: 1 addition & 1 deletion backend/flake8_plugins/flake8_baserow/docstring.py
@@ -18,7 +18,7 @@


DocstringType = Union[ast.Constant, ast.Str]
ERR_MSG = "X1 - Baserow plugin: missing empty line after docstring"
ERR_MSG = "BDC001 - Baserow plugin: missing empty line after docstring"


class Token:
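The docstring rule itself is untouched here; only its reported code changes from X1 to BDC001. A hypothetical snippet of the pattern the message describes:

```python
def add(a: int, b: int) -> int:
    """Return the sum of a and b."""
    return a + b  # BDC001: no empty line between the docstring and the body
```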
19 changes: 10 additions & 9 deletions backend/flake8_plugins/flake8_baserow/psycopg.py
@@ -1,9 +1,10 @@
import ast
from typing import Iterator, Tuple, Any


class BaserowPsycopgChecker:
name = 'flake8-baserow-psycopg'
version = '0.1.0'
name = "flake8-baserow-psycopg"
version = "0.1.0"

def __init__(self, tree: ast.AST, filename: str):
self.tree = tree
@@ -13,18 +14,18 @@ def run(self) -> Iterator[Tuple[int, int, str, Any]]:
for node in ast.walk(self.tree):
if isinstance(node, ast.Import):
for alias in node.names:
if alias.name in ('psycopg', 'psycopg2'):
if alias.name in ("psycopg", "psycopg2"):
yield (
node.lineno,
node.col_offset,
'BRP001 Import psycopg/psycopg2 from baserow.core.psycopg instead',
type(self)
"BPG001 Import psycopg/psycopg2 from baserow.core.psycopg instead",
type(self),
)
elif isinstance(node, ast.ImportFrom):
if node.module in ('psycopg', 'psycopg2'):
if node.module in ("psycopg", "psycopg2"):
yield (
node.lineno,
node.col_offset,
'BRP001 Import psycopg/psycopg2 from baserow.core.psycopg instead',
type(self)
)
"BPG001 Import psycopg/psycopg2 from baserow.core.psycopg instead",
type(self),
)
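Behaviour is unchanged in this file; it is reformatted (black-style quotes and trailing commas) and the reported code moves from BRP001 to BPG001. A hypothetical module showing what the checker flags:

```python
import psycopg2          # BPG001: direct psycopg2 import is reported
from psycopg import sql  # BPG001: direct psycopg import is reported

from baserow.core import psycopg  # allowed: go through the wrapper module instead
```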