8 changes: 0 additions & 8 deletions backend/app/database/errors.py
@@ -11,13 +11,6 @@
logger = logging.getLogger(__name__)


async def _get_db() -> Generator:
"""Duplicate of app.dependencies.get_db(), but importing that causes circular import."""
mongo_client = motor.motor_asyncio.AsyncIOMotorClient(settings.MONGODB_URL)
db = mongo_client[settings.MONGO_DATABASE]
yield db


async def log_error(
exception: Exception,
resource: Optional[MongoDBRef] = None,
@@ -30,7 +23,6 @@ async def log_error(
resource -- if error relates to a specific resource, you can include it
user --- if error relates to actions performed by a user, you can include them
"""
db = _get_db()
message = str(exception)
trace = traceback.format_exc(exception, limit=4)

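With the per-call database handle gone, log_error() presumably relies on Beanie models that are registered once at application startup. A minimal sketch of that startup step, assuming a settings module with the names used above; the import path and model list are illustrative, not taken from this diff:

    import motor.motor_asyncio
    from beanie import init_beanie

    from app.config import settings  # assumed location of MONGODB_URL / MONGO_DATABASE
    from app.models.files import FileDB, FileVersionDB
    from app.models.metadata import MetadataDB


    async def init_db() -> None:
        # Initialize Beanie once; after this, Document models (FileDB, MetadataDB, ...)
        # can be queried anywhere without passing a Motor client around.
        client = motor.motor_asyncio.AsyncIOMotorClient(settings.MONGODB_URL)
        await init_beanie(
            database=client[settings.MONGO_DATABASE],
            document_models=[FileDB, FileVersionDB, MetadataDB],  # plus the other models in this PR
        )
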
9 changes: 0 additions & 9 deletions backend/app/dependencies.py
@@ -15,15 +15,6 @@
from app.search.connect import connect_elasticsearch


async def get_db() -> Generator:
mongo_client = motor.motor_asyncio.AsyncIOMotorClient(settings.MONGODB_URL)
db = mongo_client[settings.MONGO_DATABASE]
if db is None:
raise HTTPException(status_code=503, detail="Service not available")
return
yield db


async def get_fs() -> Generator:
file_system = Minio(
settings.MINIO_SERVER_URL,
32 changes: 9 additions & 23 deletions backend/app/deps/authorization_deps.py
@@ -2,13 +2,11 @@
from beanie.operators import Or
from bson import ObjectId
from fastapi import Depends, HTTPException
from pymongo import MongoClient

from app.dependencies import get_db
from app.keycloak_auth import get_current_username
from app.models.authorization import RoleType, AuthorizationDB
from app.models.datasets import DatasetDB
from app.models.files import FileOut
from app.models.files import FileOut, FileDB
from app.models.groups import GroupOut, GroupDB
from app.models.metadata import MetadataDB
from app.models.pyobjectid import PyObjectId
@@ -32,13 +30,11 @@ async def get_role(

async def get_role_by_file(
file_id: str,
db: MongoClient = Depends(get_db),
current_user=Depends(get_current_username),
) -> RoleType:
if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
file_out = FileOut.from_mongo(file)
if (file := await FileDB.get(PydanticObjectId(file_id))) is not None:
authorization = await AuthorizationDB.find_one(
AuthorizationDB.dataset_id == file_out.dataset_id,
AuthorizationDB.dataset_id == file.dataset_id,
Or(
AuthorizationDB.creator == current_user,
AuthorizationDB.user_ids == current_user,
@@ -50,19 +46,15 @@ async def get_role_by_file(

async def get_role_by_metadata(
metadata_id: str,
db: MongoClient = Depends(get_db),
current_user=Depends(get_current_username),
) -> RoleType:
if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None:
resource_type = md_out.resource.collection
resource_id = md_out.resource.resource_id
if resource_type == "files":
if (
file := await db["files"].find_one({"_id": ObjectId(resource_id)})
) is not None:
file_out = FileOut.from_mongo(file)
if (file := await FileDB.get(PydanticObjectId(resource_id))) is not None:
authorization = await AuthorizationDB.find_one(
AuthorizationDB.dataset_id == file_out.dataset_id,
AuthorizationDB.dataset_id == file.dataset_id,
Or(
AuthorizationDB.creator == current_user,
AuthorizationDB.user_ids == current_user,
@@ -114,7 +106,6 @@ def __init__(self, role: str):
async def __call__(
self,
dataset_id: str,
db: MongoClient = Depends(get_db),
current_user: str = Depends(get_current_username),
):
# TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here.
@@ -150,13 +141,11 @@ def __init__(self, role: str):
async def __call__(
self,
file_id: str,
db: MongoClient = Depends(get_db),
current_user: str = Depends(get_current_username),
):
if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
file_out = FileOut.from_mongo(file)
if (file := await FileDB.get(PydanticObjectId(file_id))) is not None:
authorization = await AuthorizationDB.find_one(
AuthorizationDB.dataset_id == file_out.dataset_id,
AuthorizationDB.dataset_id == file.dataset_id,
Or(
AuthorizationDB.creator == current_user,
AuthorizationDB.user_ids == current_user,
@@ -183,19 +172,17 @@ def __init__(self, role: str):
async def __call__(
self,
metadata_id: str,
db: MongoClient = Depends(get_db),
current_user: str = Depends(get_current_username),
):
if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None:
resource_type = md_out.resource.collection
resource_id = md_out.resource.resource_id
if resource_type == "files":
if (
file := await db["files"].find_one({"_id": ObjectId(resource_id)})
file := await FileDB.get(PydanticObjectId(resource_id))
) is not None:
file_out = FileOut.from_mongo(file)
authorization = await AuthorizationDB.find_one(
AuthorizationDB.dataset_id == file_out.dataset_id,
AuthorizationDB.dataset_id == file.dataset_id,
Or(
AuthorizationDB.creator == current_user,
AuthorizationDB.user_ids == current_user,
@@ -246,7 +233,6 @@ def __init__(self, role: str):
async def __call__(
self,
group_id: str,
db: MongoClient = Depends(get_db),
current_user: str = Depends(get_current_username),
):
if (group := await GroupDB.get(group_id)) is not None:
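For reference, a dependency written in this style is consumed from a route exactly as before; a hypothetical endpoint (not part of this diff) showing how FastAPI resolves it without any Mongo client being threaded through:

    from fastapi import APIRouter, Depends

    from app.deps.authorization_deps import get_role_by_file
    from app.models.authorization import RoleType

    router = APIRouter()


    @router.get("/files/{file_id}/role")
    async def file_role(file_id: str, role: RoleType = Depends(get_role_by_file)):
        # get_role_by_file(file_id, current_user) runs first; the handler only
        # sees the resolved role.
        return {"file_id": file_id, "role": role}
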
4 changes: 1 addition & 3 deletions backend/app/models/datasets.py
@@ -3,7 +3,7 @@
from typing import Optional, List

import pymongo
from beanie import Document, View, PydanticObjectId
from beanie import Document, View
from pydantic import BaseModel, Field

from app.models.authorization import RoleType, AuthorizationDB
@@ -56,8 +56,6 @@ class Settings:


class DatasetDBViewList(View, DatasetBase):
# FIXME This seems to be required to return _id. Otherwise _id is null in the response.
id: PydanticObjectId = Field(None, alias="_id")
creator: UserOut
created: datetime = Field(default_factory=datetime.utcnow)
modified: datetime = Field(default_factory=datetime.utcnow)
2 changes: 0 additions & 2 deletions backend/app/models/feeds.py
@@ -27,8 +27,6 @@ class FeedIn(JobFeed):


class FeedDB(Document, JobFeed, Provenance):
id: PydanticObjectId = Field(None, alias="_id")

class Settings:
name = "feeds"
indexes = [
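The explicit `id: PydanticObjectId = Field(None, alias="_id")` fields deleted here and in the other models are redundant: Beanie's `Document` base class already defines an `id` field backed by Mongo's `_id`. A small illustrative model, hypothetical and not part of this PR:

    from beanie import Document


    class ExampleDoc(Document):
        # Document already supplies `id: Optional[PydanticObjectId]` mapped to `_id`,
        # so no explicit alias field is declared here.
        name: str

        class Settings:
            name = "examples"

    # After `await ExampleDoc(name="demo").insert()`, the instance's `id` is populated.
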
36 changes: 30 additions & 6 deletions backend/app/models/files.py
@@ -1,9 +1,10 @@
from datetime import datetime
from typing import Optional
from typing import Optional, List

from beanie import Document, PydanticObjectId
from beanie import Document, View
from pydantic import Field, BaseModel

from app.models.authorization import AuthorizationDB
from app.models.pyobjectid import PyObjectId
from app.models.users import UserOut

@@ -29,8 +30,6 @@ class FileVersion(BaseModel):


class FileVersionDB(Document, FileVersion):
id: PydanticObjectId = Field(None, alias="_id")

class Settings:
name = "file_versions"

@@ -44,7 +43,6 @@ class FileIn(FileBase):


class FileDB(Document, FileBase):
id: PydanticObjectId = Field(None, alias="_id")
creator: UserOut
created: datetime = Field(default_factory=datetime.utcnow)
version_id: str = "N/A"
@@ -60,5 +58,31 @@ class Settings:
name = "files"


class FileDBViewList(View, FileBase):
creator: UserOut
created: datetime = Field(default_factory=datetime.utcnow)
modified: datetime = Field(default_factory=datetime.utcnow)
auth: List[AuthorizationDB]

class Settings:
source = FileDB
name = "files_view"
pipeline = [
{
"$lookup": {
"from": "authorization",
"localField": "dataset_id",
"foreignField": "dataset_id",
"as": "auth",
}
},
]
# Needs fix to work https://github.com/roman-right/beanie/pull/521
# use_cache = True
# cache_expiration_time = timedelta(seconds=10)
# cache_capacity = 5


class FileOut(FileDB):
pass
class Config:
fields = {"id": "id"}
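A view of this shape is presumably what replaces the manual `db["files"]` lookups plus authorization checks elsewhere in the PR; one way it might be queried (the filter is illustrative, not taken from this diff):

    from beanie.operators import Or

    from app.models.files import FileDBViewList


    async def files_visible_to(username: str):
        # The $lookup above joins each file's dataset authorization records in as `auth`,
        # so visibility becomes a filter over the joined documents.
        return await FileDBViewList.find(
            Or(
                FileDBViewList.auth.creator == username,
                FileDBViewList.auth.user_ids == username,
            )
        ).to_list()
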
34 changes: 30 additions & 4 deletions backend/app/models/folders.py
@@ -1,9 +1,10 @@
from datetime import datetime
from typing import Optional
from typing import Optional, List

from beanie import Document, PydanticObjectId
from beanie import Document, View
from pydantic import Field, BaseModel

from app.models.authorization import AuthorizationDB
from app.models.pyobjectid import PyObjectId
from app.models.users import UserOut

@@ -17,7 +18,6 @@ class FolderIn(FolderBase):


class FolderDB(Document, FolderBase):
id: PydanticObjectId = Field(None, alias="_id")
dataset_id: PyObjectId
parent_folder: Optional[PyObjectId]
creator: UserOut
@@ -28,5 +28,31 @@ class Settings:
name = "folders"


class FolderDBViewList(View, FolderBase):
creator: UserOut
created: datetime = Field(default_factory=datetime.utcnow)
modified: datetime = Field(default_factory=datetime.utcnow)
auth: List[AuthorizationDB]

class Settings:
source = FolderDB
name = "folders_view"
pipeline = [
{
"$lookup": {
"from": "authorization",
"localField": "dataset_id",
"foreignField": "dataset_id",
"as": "auth",
}
},
]
# Needs fix to work https://github.com/roman-right/beanie/pull/521
# use_cache = True
# cache_expiration_time = timedelta(seconds=10)
# cache_capacity = 5


class FolderOut(FolderDB):
pass
class Config:
fields = {"id": "id"}
8 changes: 0 additions & 8 deletions backend/app/models/listeners.py
@@ -67,7 +67,6 @@ class LegacyEventListenerIn(ExtractorInfo):
class EventListenerDB(Document, EventListenerBase):
"""EventListeners have a name, version, author, description, and optionally properties where extractor_info will be saved."""

id: PydanticObjectId = Field(None, alias="_id")
creator: Optional[UserOut] = None
created: datetime = Field(default_factory=datetime.now)
modified: datetime = Field(default_factory=datetime.now)
@@ -132,8 +131,6 @@ class Config:
class EventListenerJobDB(Document, EventListenerJobBase):
"""This summarizes a submission to an extractor. All messages from that extraction should include this job's ID."""

id: PydanticObjectId = Field(None, alias="_id")

class Settings:
name = "listener_jobs"
indexes = [
@@ -175,7 +172,6 @@ class EventListenerDatasetJobMessage(BaseModel):
class EventListenerJobUpdateBase(BaseModel):
"""This is a status update message coming from the extractors back to Clowder."""

id: PydanticObjectId = Field(None, alias="_id")
job_id: str
timestamp: datetime = Field(default_factory=datetime.utcnow)
status: str
@@ -195,8 +191,6 @@ class Settings:
class EventListenerJobViewList(View, EventListenerJobBase):
"""Get associated resource information for each job"""

# FIXME This seems to be required to return _id. Otherwise _id is null in the response.
id: PydanticObjectId = Field(None, alias="_id")
creator: UserOut
created: datetime = Field(default_factory=datetime.utcnow)
modified: datetime = Field(default_factory=datetime.utcnow)
@@ -262,8 +256,6 @@ class Settings:
class EventListenerJobUpdateViewList(View, EventListenerJobUpdateBase):
"""Get associated resource information for each job update"""

# FIXME This seems to be required to return _id. Otherwise _id is null in the response.
id: PydanticObjectId = Field(None, alias="_id")
creator: UserOut
created: datetime = Field(default_factory=datetime.utcnow)
modified: datetime = Field(default_factory=datetime.utcnow)
2 changes: 0 additions & 2 deletions backend/app/models/metadata.py
@@ -107,7 +107,6 @@ class MetadataDefinitionIn(MetadataDefinitionBase):


class MetadataDefinitionDB(Document, MetadataDefinitionBase):
id: PydanticObjectId = Field(None, alias="_id")
creator: UserOut

class Settings:
@@ -228,7 +227,6 @@ class MetadataDelete(BaseModel):


class MetadataDB(Document, MetadataBase):
id: PydanticObjectId = Field(None, alias="_id")
resource: MongoDBRef
agent: MetadataAgent
created: datetime = Field(default_factory=datetime.utcnow)
11 changes: 5 additions & 6 deletions backend/app/models/users.py
@@ -4,7 +4,6 @@
from beanie import Document
from passlib.context import CryptContext
from pydantic import Field, EmailStr, BaseModel
from pymongo import MongoClient

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

@@ -33,6 +32,9 @@ class UserDB(Document, UserBase):
def verify_password(self, password):
return pwd_context.verify(password, self.hashed_password)

class Settings:
name = "users"


class UserOut(UserBase):
first_name: str
@@ -62,8 +64,5 @@ class UserAPIKeyOut(BaseModel):
created: datetime = Field(default_factory=datetime.utcnow)
expires: Optional[datetime] = None


async def get_user_out(user_id: str, db: MongoClient) -> UserOut:
"""Retrieve user from Mongo based on email address."""
user_out = await db["users"].find_one({"email": user_id})
return UserOut.from_mongo(user_out)
class Config:
fields = {"id": "id"}
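The removed `get_user_out` helper was the last caller here that needed a raw `db` handle; with `UserDB` now bound to the `users` collection, the same lookup would presumably become a single Beanie query. A sketch, assuming `UserBase` exposes an `email` field (as the `EmailStr` import above suggests):

    from typing import Optional

    from app.models.users import UserDB, UserOut


    async def get_user_out(user_id: str) -> Optional[UserOut]:
        """Retrieve a user by email address via Beanie instead of a raw Mongo client."""
        user = await UserDB.find_one(UserDB.email == user_id)
        return UserOut(**user.dict()) if user else None
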