Skip to content

Commit

Permalink
Feat/testing backend (#446)
Browse files Browse the repository at this point in the history
* feat(pytest): added pytest setup and CI configuration for the backend

* feat(brains): added tests for the brains model

* feat(actions): added a GitHub Actions workflow that runs pytest
  • Loading branch information
StanGirard committed Jul 2, 2023
1 parent bab76ba commit e076bbe
Show file tree
Hide file tree
Showing 16 changed files with 423 additions and 61 deletions.
40 changes: 40 additions & 0 deletions .github/workflows/pytest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# GitHub Actions workflow: run the backend test suite with pytest on every
# push / pull request targeting main.
name: Pytest

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest
    defaults:
      run:
        # Every `run` step executes from the backend package root.
        working-directory: ./backend
    # Uses the "preview" environment so its variables are available below.
    environment: preview
    strategy:
      matrix:
        python-version: [ "3.11" ]

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
      - name: Test with pytest
        # NOTE(review): these credentials come from the `vars` context, which
        # stores values in plain text and does NOT mask them in logs. API keys
        # and JWT_SECRET_KEY should normally live in the `secrets` context
        # (${{ secrets.X }}) — confirm repository/environment settings before
        # switching, since the values must be re-created as secrets first.
        env:
          SUPABASE_URL: ${{ vars.SUPABASE_URL }}
          SUPABASE_SERVICE_KEY: ${{ vars.SUPABASE_SERVICE_KEY }}
          OPENAI_API_KEY: ${{ vars.OPENAI_API_KEY }}
          ANTHROPIC_API_KEY: ${{ vars.ANTHROPIC_API_KEY }}
          JWT_SECRET_KEY: ${{ vars.JWT_SECRET_KEY }}
          CI_TEST_API_KEY: ${{ vars.CI_TEST_API_KEY }}
        run: |
          pytest
8 changes: 6 additions & 2 deletions .github/workflows/vitest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,18 @@ jobs:
defaults:
run:
working-directory: ./frontend
strategy:
matrix:
node-version: [18]

steps:
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: 18
node-version: ${{ matrix.node-version }}
cache: 'yarn'
cache-dependency-path: frontend/yarn.lock
- run: yarn
- run: yarn run test-unit
- run: yarn run test-unit
- run: yarn run build
Empty file added backend/auth/__init__.py
Empty file.
8 changes: 5 additions & 3 deletions backend/auth/api_key_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,10 @@ async def verify_api_key(
result.data[0]["creation_time"], "%Y-%m-%dT%H:%M:%S"
).date()

# Check if the API key was created today: Todo remove this check and use deleted_time instead.
if api_key_creation_date == current_date:
# Check if the API key was created in the month of the current date
if (api_key_creation_date.month == current_date.month) and (
api_key_creation_date.year == current_date.year
):
return True
return False
except DateError:
Expand Down Expand Up @@ -62,7 +64,7 @@ async def get_user_from_api_key(
)

return (
{"email": user_email_data.data[0]["email"]}
{"email": user_email_data.data[0]["email"], "sub": user_id}
if user_email_data.data
else {"email": None}
)
4 changes: 3 additions & 1 deletion backend/auth/auth_bearer.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,4 +55,6 @@ def get_test_user(self):


def get_current_user(credentials: dict = Depends(AuthBearer())) -> User:
return User(email=credentials.get("email", "none"), id=credentials.get("sub", "none"))
return User(
email=credentials.get("email", "none"), id=credentials.get("sub", "none")
)
Empty file added backend/crawl/__init__.py
Empty file.
171 changes: 129 additions & 42 deletions backend/models/brains.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
class Brain(BaseModel):
id: Optional[UUID] = None
name: Optional[str] = "Default brain"
status: Optional[str]= "public"
status: Optional[str] = "public"
model: Optional[str] = "gpt-3.5-turbo-0613"
temperature: Optional[float] = 0.0
max_tokens: Optional[int] = 256
Expand All @@ -31,9 +31,9 @@ def commons(self) -> CommonsDep:
@property
def brain_size(self):
self.get_unique_brain_files()
current_brain_size = sum(float(doc['size']) for doc in self.files)
current_brain_size = sum(float(doc["size"]) for doc in self.files)

print('current_brain_size', current_brain_size)
print("current_brain_size", current_brain_size)
return current_brain_size

@property
Expand Down Expand Up @@ -65,30 +65,86 @@ def get_brain_details(self):
)
return response.data

def delete_brain(self):
self.commons["supabase"].table("brains").delete().match(
{"brain_id": self.id}
).execute()
def delete_brain(self, user_id):
print("user_id", user_id)
print("self.id", self.id)
results = (
self.commons["supabase"]
.table("brains_users")
.select("*")
.match({"brain_id": self.id, "user_id": user_id, "rights": "Owner"})
.execute()
)
if len(results.data) == 0:
print("You are not the owner of this brain.")
return {"message": "You are not the owner of this brain."}
else:
results = (
self.commons["supabase"]
.table("brains_vectors")
.delete()
.match({"brain_id": self.id})
.execute()
)
print("results", results)

results = (
self.commons["supabase"]
.table("brains_users")
.delete()
.match({"brain_id": self.id})
.execute()
)
print("results", results)

results = (
self.commons["supabase"]
.table("brains")
.delete()
.match({"brain_id": self.id})
.execute()
)
print("results", results)

def create_brain(self):
commons = common_dependencies()
response = commons["supabase"].table("brains").insert({"name": self.name}).execute()
response = (
commons["supabase"].table("brains").insert({"name": self.name}).execute()
)
# set the brainId with response.data

self.id = response.data[0]['brain_id']
self.id = response.data[0]["brain_id"]
return response.data

def create_brain_user(self, user_id: UUID, rights, default_brain):
commons = common_dependencies()
response = commons["supabase"].table("brains_users").insert({"brain_id": str(self.id), "user_id": str(user_id), "rights": rights, "default_brain": default_brain}).execute()

response = (
commons["supabase"]
.table("brains_users")
.insert(
{
"brain_id": str(self.id),
"user_id": str(user_id),
"rights": rights,
"default_brain": default_brain,
}
)
.execute()
)

return response.data

def create_brain_vector(self, vector_id, file_sha1):
response = (
self.commons["supabase"]
.table("brains_vectors")
.insert({"brain_id": str(self.id), "vector_id": str(vector_id), "file_sha1": file_sha1})
.insert(
{
"brain_id": str(self.id),
"vector_id": str(vector_id),
"file_sha1": file_sha1,
}
)
.execute()
)
return response.data
Expand Down Expand Up @@ -121,22 +177,22 @@ def get_unique_brain_files(self):
"""

response = (
self.commons["supabase"]
.from_("brains_vectors")
.select("vector_id")
.filter("brain_id", "eq", self.id)
.execute()
)
self.commons["supabase"]
.from_("brains_vectors")
.select("vector_id")
.filter("brain_id", "eq", self.id)
.execute()
)

vector_ids = [item["vector_id"] for item in response.data]

print('vector_ids', vector_ids)
print("vector_ids", vector_ids)

if len(vector_ids) == 0:
return []

self.files = self.get_unique_files_from_vector_ids(vector_ids)
print('unique_files', self.files)
print("unique_files", self.files)

return self.files

Expand All @@ -145,52 +201,84 @@ def get_unique_files_from_vector_ids(self, vectors_ids: List[int]):
"""
Retrieve unique user data vectors.
"""
print('vectors_ids', vectors_ids)
print('tuple(vectors_ids)', tuple(vectors_ids))
print("vectors_ids", vectors_ids)
print("tuple(vectors_ids)", tuple(vectors_ids))
if len(vectors_ids) == 1:
vectors_response = self.commons['supabase'].table("vectors").select(
"name:metadata->>file_name, size:metadata->>file_size", count="exact") \
.filter("id", "eq", vectors_ids[0])\
vectors_response = (
self.commons["supabase"]
.table("vectors")
.select(
"name:metadata->>file_name, size:metadata->>file_size",
count="exact",
)
.filter("id", "eq", vectors_ids[0])
.execute()
)
else:
vectors_response = self.commons['supabase'].table("vectors").select(
"name:metadata->>file_name, size:metadata->>file_size", count="exact") \
.filter("id", "in", tuple(vectors_ids))\
vectors_response = (
self.commons["supabase"]
.table("vectors")
.select(
"name:metadata->>file_name, size:metadata->>file_size",
count="exact",
)
.filter("id", "in", tuple(vectors_ids))
.execute()

)

documents = vectors_response.data # Access the data from the response
# Convert each dictionary to a tuple of items, then to a set to remove duplicates, and then back to a dictionary
unique_files = [dict(t) for t in set(tuple(d.items()) for d in documents)]
return unique_files

def delete_file_from_brain(self, file_name: str):
# First, get the vector_ids associated with the file_name
vector_response = self.commons["supabase"].table("vectors").select("id").filter("metadata->>file_name", "eq", file_name).execute()
vector_response = (
self.commons["supabase"]
.table("vectors")
.select("id")
.filter("metadata->>file_name", "eq", file_name)
.execute()
)
vector_ids = [item["id"] for item in vector_response.data]

# For each vector_id, delete the corresponding entry from the 'brains_vectors' table
for vector_id in vector_ids:
self.commons["supabase"].table("brains_vectors").delete().filter("vector_id", "eq", vector_id).filter("brain_id", "eq", self.id).execute()
self.commons["supabase"].table("brains_vectors").delete().filter(
"vector_id", "eq", vector_id
).filter("brain_id", "eq", self.id).execute()

# Check if the vector is still associated with any other brains
associated_brains_response = self.commons["supabase"].table("brains_vectors").select("brain_id").filter("vector_id", "eq", vector_id).execute()
associated_brains = [item["brain_id"] for item in associated_brains_response.data]
associated_brains_response = (
self.commons["supabase"]
.table("brains_vectors")
.select("brain_id")
.filter("vector_id", "eq", vector_id)
.execute()
)
associated_brains = [
item["brain_id"] for item in associated_brains_response.data
]

# If the vector is not associated with any other brains, delete it from 'vectors' table
if not associated_brains:
self.commons["supabase"].table("vectors").delete().filter("id", "eq", vector_id).execute()
self.commons["supabase"].table("vectors").delete().filter(
"id", "eq", vector_id
).execute()

return {"message": f"File {file_name} in brain {self.id} has been deleted."}


def get_default_user_brain(user: User):
commons = common_dependencies()
response = (
commons["supabase"]
.from_("brains_users") # I'm assuming this is the correct table
.select("brain_id")
.from_("brains_users") # I'm assuming this is the correct table
.select("brain_id")
.filter("user_id", "eq", user.id)
.filter("default_brain", "eq", True) # Assuming 'default' is the correct column name
.filter(
"default_brain", "eq", True
) # Assuming 'default' is the correct column name
.execute()
)

Expand All @@ -207,8 +295,7 @@ def get_default_user_brain(user: User):
.filter("brain_id", "eq", default_brain_id)
.execute()
)

return brain_response.data[0] if brain_response.data else None

return None

Empty file added backend/parsers/__init__.py
Empty file.
Empty file added backend/repository/__init__.py
Empty file.
Empty file.
3 changes: 2 additions & 1 deletion backend/routes/api_key_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ class ApiKeyInfo(BaseModel):

class ApiKey(BaseModel):
api_key: str
key_id: str


api_key_router = APIRouter()
Expand Down Expand Up @@ -76,7 +77,7 @@ async def create_api_key(
return {"api_key": "Error creating new API key."}
logger.info(f"Created new API key for user {current_user.email}.")

return {"api_key": new_api_key}
return {"api_key": new_api_key, "key_id": str(new_key_id)}


@api_key_router.delete(
Expand Down
Loading

0 comments on commit e076bbe

Please sign in to comment.