From dc39e85c89d1ff4755bbd84b1e76d52e591fe352 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 25 Apr 2023 08:07:22 -0500 Subject: [PATCH 01/32] import container (#462) --- frontend/src/components/groups/AddMemberModal.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/components/groups/AddMemberModal.tsx b/frontend/src/components/groups/AddMemberModal.tsx index 4fff67827..51ded2412 100644 --- a/frontend/src/components/groups/AddMemberModal.tsx +++ b/frontend/src/components/groups/AddMemberModal.tsx @@ -2,6 +2,7 @@ import React, { useEffect, useState } from "react"; import { Autocomplete, Button, + Container, Dialog, DialogActions, DialogContent, From 8c7f72716dfef15aaea16e12363762077b92e9df Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 25 Apr 2023 08:11:17 -0500 Subject: [PATCH 02/32] 446 gui to manage api keys (#465) * add new routes placeholder * add list and delete endpoint; inline docs * codegen and pytest * ugly ui? * onstart clear user keys * delete works but pytest doesn't work yet * pytest all works * codegen * redux * reducer and types * write the table * correctly wire in the apikey and error handling * wire in the delete modal of apikeys * wire in the creation api key * everything working in the gui * formatting --------- Co-authored-by: toddn --- backend/app/models/users.py | 12 +- backend/app/routers/users.py | 112 +++- backend/app/tests/test_apikey.py | 24 + backend/app/tests/utils.py | 13 +- frontend/src/actions/user.js | 45 +- frontend/src/components/ApiKeys/ApiKey.tsx | 214 ++++++++ .../CreateApiKeyModal.tsx} | 53 +- frontend/src/components/Layout.tsx | 14 +- frontend/src/openapi/v2/index.ts | 1 + .../src/openapi/v2/models/UserAPIKeyOut.ts | 11 + .../src/openapi/v2/services/UsersService.ts | 106 +++- frontend/src/reducers/user.ts | 17 +- frontend/src/routes.tsx | 211 +++++--- frontend/src/types/action.ts | 23 + frontend/src/types/data.ts | 4 +- scripts/mongoviews/mongo-init-dev.js | 506 +++++++++--------- scripts/mongoviews/mongo-init.js | 506 +++++++++--------- 17 files changed, 1194 insertions(+), 678 deletions(-) create mode 100644 backend/app/tests/test_apikey.py create mode 100644 frontend/src/components/ApiKeys/ApiKey.tsx rename frontend/src/components/{users/ApiKeyModal.tsx => ApiKeys/CreateApiKeyModal.tsx} (63%) create mode 100644 frontend/src/openapi/v2/models/UserAPIKeyOut.ts diff --git a/backend/app/models/users.py b/backend/app/models/users.py index 21fb5322b..cadb7c900 100644 --- a/backend/app/models/users.py +++ b/backend/app/models/users.py @@ -1,5 +1,6 @@ -from typing import Optional from datetime import datetime +from typing import Optional + from passlib.context import CryptContext from pydantic import Field, EmailStr, BaseModel from pymongo import MongoClient @@ -43,6 +44,15 @@ class UserAPIKey(MongoModel): """API keys can have a reference name (e.g. 
'Uploader script')""" key: str + name: str + user: EmailStr + created: datetime = Field(default_factory=datetime.utcnow) + expires: Optional[datetime] = None + + +class UserAPIKeyOut(MongoModel): + # don't show the raw key + name: str user: EmailStr created: datetime = Field(default_factory=datetime.utcnow) expires: Optional[datetime] = None diff --git a/backend/app/routers/users.py b/backend/app/routers/users.py index 9c2ee94cc..478b6b14e 100644 --- a/backend/app/routers/users.py +++ b/backend/app/routers/users.py @@ -1,48 +1,50 @@ +from datetime import timedelta +from secrets import token_urlsafe from typing import List + from bson import ObjectId from fastapi import APIRouter, HTTPException, Depends -from pymongo import MongoClient -from datetime import datetime, timedelta from itsdangerous.url_safe import URLSafeSerializer -from itsdangerous.exc import BadSignature -from secrets import token_urlsafe +from pymongo import MongoClient, DESCENDING from app import dependencies from app.config import settings from app.keycloak_auth import get_current_username -from app.models.users import UserOut, UserAPIKey +from app.models.users import UserOut, UserAPIKey, UserAPIKeyOut router = APIRouter() -@router.get("", response_model=List[UserOut]) -async def get_users( - db: MongoClient = Depends(dependencies.get_db), skip: int = 0, limit: int = 2 +@router.get("/keys", response_model=List[UserAPIKeyOut]) +async def generate_user_api_key( + db: MongoClient = Depends(dependencies.get_db), + current_user=Depends(get_current_username), + skip: int = 0, + limit: int = 10, ): - users = [] - for doc in await db["users"].find().skip(skip).limit(limit).to_list(length=limit): - users.append(UserOut(**doc)) - return users - - -@router.get("/{user_id}", response_model=UserOut) -async def get_user(user_id: str, db: MongoClient = Depends(dependencies.get_db)): - if (user := await db["users"].find_one({"_id": ObjectId(user_id)})) is not None: - return UserOut.from_mongo(user) - raise HTTPException(status_code=404, detail=f"User {user_id} not found") + """List all api keys that user has created + Arguments: + skip: number of page to skip + limit: number to limit per page + """ + apikeys = [] + for doc in ( + await db["user_keys"] + .find({"user": current_user}) + .sort([("created", DESCENDING)]) + .skip(skip) + .limit(limit) + .to_list(length=limit) + ): + apikeys.append(UserAPIKeyOut.from_mongo(doc)) -@router.get("/username/{username}", response_model=UserOut) -async def get_user_by_name( - username: str, db: MongoClient = Depends(dependencies.get_db) -): - if (user := await db["users"].find_one({"email": username})) is not None: - return UserOut.from_mongo(user) - raise HTTPException(status_code=404, detail=f"User {username} not found") + return apikeys @router.post("/keys", response_model=str) async def generate_user_api_key( + name: str, mins: int = settings.local_auth_expiration, db: MongoClient = Depends(dependencies.get_db), current_user=Depends(get_current_username), @@ -50,15 +52,69 @@ async def generate_user_api_key( """Generate an API key that confers the user's privileges. 
Arguments: - mins -- number of minutes before expiration (0 for no expiration) + name: name of the api key + mins: number of minutes before expiration (0 for no expiration) """ serializer = URLSafeSerializer(settings.local_auth_secret, salt="api_key") unique_key = token_urlsafe(16) hashed_key = serializer.dumps({"user": current_user, "key": unique_key}) - user_key = UserAPIKey(user=current_user, key=unique_key) + user_key = UserAPIKey(user=current_user, key=unique_key, name=name) if mins > 0: user_key.expires = user_key.created + timedelta(minutes=mins) db["user_keys"].insert_one(user_key.to_mongo()) return hashed_key + + +@router.delete("/keys/{key_id}", response_model=UserAPIKeyOut) +async def generate_user_api_key( + key_id: str, + db: MongoClient = Depends(dependencies.get_db), + current_user=Depends(get_current_username), +): + """Delete API keys given ID + + Arguments: + key_id: id of the apikey + """ + apikey_doc = await db["user_keys"].find_one({"_id": ObjectId(key_id)}) + if apikey_doc is not None: + apikey = UserAPIKeyOut.from_mongo(apikey_doc) + + # Only allow user to delete their own key + if apikey.user == current_user: + await db["user_keys"].delete_one({"_id": ObjectId(key_id)}) + return apikey + else: + raise HTTPException( + status_code=403, detail=f"API key {key_id} not allowed to be deleted." + ) + else: + raise HTTPException(status_code=404, detail=f"API key {key_id} not found.") + + +@router.get("", response_model=List[UserOut]) +async def get_users( + db: MongoClient = Depends(dependencies.get_db), skip: int = 0, limit: int = 2 +): + users = [] + for doc in await db["users"].find().skip(skip).limit(limit).to_list(length=limit): + users.append(UserOut(**doc)) + return users + + +@router.get("/{user_id}", response_model=UserOut) +async def get_user(user_id: str, db: MongoClient = Depends(dependencies.get_db)): + if (user := await db["users"].find_one({"_id": ObjectId(user_id)})) is not None: + return UserOut.from_mongo(user) + raise HTTPException(status_code=404, detail=f"User {user_id} not found") + + +@router.get("/username/{username}", response_model=UserOut) +async def get_user_by_name( + username: str, db: MongoClient = Depends(dependencies.get_db) +): + if (user := await db["users"].find_one({"email": username})) is not None: + return UserOut.from_mongo(user) + raise HTTPException(status_code=404, detail=f"User {username} not found") diff --git a/backend/app/tests/test_apikey.py b/backend/app/tests/test_apikey.py new file mode 100644 index 000000000..a525f03ab --- /dev/null +++ b/backend/app/tests/test_apikey.py @@ -0,0 +1,24 @@ +from fastapi.testclient import TestClient + +from app.config import settings +from app.tests.utils import create_apikey + + +def test_create_apikey(client: TestClient, headers: dict): + hashed_key = create_apikey(client, headers) + assert hashed_key is not None + + +def test_list_apikeys(client: TestClient, headers: dict): + response = client.get(f"{settings.API_V2_STR}/users/keys", headers=headers) + assert response.status_code == 200 + + +def test_delete_apikeys(client: TestClient, headers: dict): + create_apikey(client, headers) + get_response = client.get(f"{settings.API_V2_STR}/users/keys", headers=headers) + key_id = get_response.json()[0].get("id") + delete_response = client.delete( + f"{settings.API_V2_STR}/users/keys/{key_id}", headers=headers + ) + assert delete_response.status_code == 200 diff --git a/backend/app/tests/utils.py b/backend/app/tests/utils.py index c7c45d32b..cef6bf3df 100644 --- a/backend/app/tests/utils.py +++ 
b/backend/app/tests/utils.py @@ -1,5 +1,7 @@ import os + from fastapi.testclient import TestClient + from app.config import settings """These are standard JSON entries to be used for creating test resources.""" @@ -69,7 +71,6 @@ "bibtex": [], } - """CONVENIENCE FUNCTIONS FOR COMMON ACTIONS REQUIRED BY TESTS.""" @@ -95,6 +96,16 @@ def get_user_token(client: TestClient, headers: dict, email: str = user_alt["ema return {"Authorization": "Bearer " + token} +def create_apikey(client: TestClient, headers: dict): + """create user generated API key""" + response = client.post( + f"{settings.API_V2_STR}/users/keys?name=pytest&mins=30", headers=headers + ) + assert response.status_code == 200 + assert response.json() is not None + return response.json() + + def create_group(client: TestClient, headers: dict): """Creates a test group (creator will be auto-added to members) and returns the JSON.""" response = client.post( diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index 2841f3879..8838ae907 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -1,6 +1,7 @@ import { V2 } from "../openapi"; import Cookies from "universal-cookie"; import config from "../app.config"; +import { handleErrors } from "./common"; const cookies = new Cookies(); @@ -126,25 +127,61 @@ export function fetchAllUsers(skip = 0, limit = 101) { }); }) .catch((reason) => { - dispatch(fetchAllUsers((skip = 0), (limit = 21))); + dispatch(fetchAllUsers(skip, limit)); + }); + }; +} + +export const LIST_API_KEYS = "LIST_API_KEYS"; + +export function listApiKeys(skip = 0, limit = 10) { + return (dispatch) => { + return V2.UsersService.generateUserApiKeyApiV2UsersKeysGet(skip, limit) + .then((json) => { + dispatch({ + type: LIST_API_KEYS, + apiKeys: json, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch(handleErrors(reason, listApiKeys(skip, limit))); }); }; } export const GENERATE_API_KEY = "GENERATE_API_KEY"; -export function generateApiKey(minutes = 30) { +export function generateApiKey(name = "", minutes = 30) { return (dispatch) => { - return V2.UsersService.generateUserApiKeyApiV2UsersKeysPost(minutes) + return V2.UsersService.generateUserApiKeyApiV2UsersKeysPost(name, minutes) .then((json) => { dispatch({ type: GENERATE_API_KEY, + hashedKey: json, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch(handleErrors(reason, generateApiKey(name, minutes))); + }); + }; +} + +export const DELETE_API_KEY = "DELETE_API_KEY"; + +export function deleteApiKey(keyId) { + return (dispatch) => { + return V2.UsersService.generateUserApiKeyApiV2UsersKeysKeyIdDelete(keyId) + .then((json) => { + dispatch({ + type: DELETE_API_KEY, apiKey: json, receivedAt: Date.now(), }); }) .catch((reason) => { - dispatch(generateApiKey((minutes = 30))); + dispatch(handleErrors(reason, deleteApiKey(keyId))); }); }; } diff --git a/frontend/src/components/ApiKeys/ApiKey.tsx b/frontend/src/components/ApiKeys/ApiKey.tsx new file mode 100644 index 000000000..a9b3769e7 --- /dev/null +++ b/frontend/src/components/ApiKeys/ApiKey.tsx @@ -0,0 +1,214 @@ +import React, { useEffect, useState } from "react"; +import { Box, Button, ButtonGroup, Grid, IconButton } from "@mui/material"; +import { RootState } from "../../types/data"; +import { useDispatch, useSelector } from "react-redux"; +import { ArrowBack, ArrowForward } from "@material-ui/icons"; +import Paper from "@mui/material/Paper"; +import Table from "@mui/material/Table"; +import TableHead from "@mui/material/TableHead"; +import 
TableRow from "@mui/material/TableRow"; +import TableCell from "@mui/material/TableCell"; +import TableBody from "@mui/material/TableBody"; +import TableContainer from "@mui/material/TableContainer"; +import Layout from "../Layout"; +import { MainBreadcrumbs } from "../navigation/BreadCrumb"; + +import { + deleteApiKey as deleteApiKeyAction, + listApiKeys as listApiKeysAction, +} from "../../actions/user"; +import DeleteIcon from "@mui/icons-material/Delete"; +import { theme } from "../../theme"; +import { parseDate } from "../../utils/common"; +import { ActionModal } from "../dialog/ActionModal"; +import VpnKeyIcon from "@mui/icons-material/VpnKey"; +import { CreateApiKeyModal } from "./CreateApiKeyModal"; + +export function ApiKeys() { + // Redux connect equivalent + const dispatch = useDispatch(); + const listApiKeys = (skip: number | undefined, limit: number | undefined) => + dispatch(listApiKeysAction(skip, limit)); + const deleteApiKey = (keyId: string) => dispatch(deleteApiKeyAction(keyId)); + + const apiKeys = useSelector((state: RootState) => state.user.apiKeys); + + // TODO add option to determine limit number; default show 5 tokens each time + const [currPageNum, setCurrPageNum] = useState(0); + const [limit] = useState(5); + const [skip, setSkip] = useState(0); + const [prevDisabled, setPrevDisabled] = useState(true); + const [nextDisabled, setNextDisabled] = useState(false); + const [selectedApikey, setSelectApikey] = useState(""); + const [deleteApikeyConfirmOpen, setDeleteApikeyConfirmOpen] = useState(false); + const [createApiKeyModalOpen, setCreateApiKeyModalOpen] = useState(false); + + // for breadcrumb + const paths = [ + { + name: "Explore", + url: "/", + }, + ]; + + // component did mount + useEffect(() => { + listApiKeys(skip, limit); + }, []); + + useEffect(() => { + // disable flipping if reaches the last page + if (apiKeys.length < limit) setNextDisabled(true); + else setNextDisabled(false); + }, [apiKeys]); + + useEffect(() => { + if (skip !== null && skip !== undefined) { + listApiKeys(skip, limit); + if (skip === 0) setPrevDisabled(true); + else setPrevDisabled(false); + } + }, [skip]); + + const previous = () => { + if (currPageNum - 1 >= 0) { + setSkip((currPageNum - 1) * limit); + setCurrPageNum(currPageNum - 1); + } + }; + const next = () => { + if (apiKeys.length === limit) { + setSkip((currPageNum + 1) * limit); + setCurrPageNum(currPageNum + 1); + } + }; + + return ( + + {/*breadcrumb*/} + + + + + + + + +
+ + {/*action modal*/} + { + deleteApiKey(selectedApikey); + setDeleteApikeyConfirmOpen(false); + }} + handleActionCancel={() => { + setDeleteApikeyConfirmOpen(false); + }} + /> + {/*create api key modal*/} + + {/*api key table*/} + + + + + API Key Name + Created at + Expired at + + + + + {apiKeys.map((apiKey) => { + return ( + + + {apiKey.name} + + + {parseDate(apiKey.created)} + + + {apiKey.expires === null + ? "Never" + : parseDate(apiKey.expires)} + + + { + setSelectApikey(apiKey.id); + setDeleteApikeyConfirmOpen(true); + }} + > + + + + + ); + })} + +
+ + + + + + +
+
+
+ ); +} diff --git a/frontend/src/components/users/ApiKeyModal.tsx b/frontend/src/components/ApiKeys/CreateApiKeyModal.tsx similarity index 63% rename from frontend/src/components/users/ApiKeyModal.tsx rename to frontend/src/components/ApiKeys/CreateApiKeyModal.tsx index b03ffb5fc..dd3295860 100644 --- a/frontend/src/components/users/ApiKeyModal.tsx +++ b/frontend/src/components/ApiKeys/CreateApiKeyModal.tsx @@ -6,12 +6,12 @@ import { DialogContent, DialogTitle, FormControl, - InputLabel, MenuItem, Select, } from "@mui/material"; import { generateApiKey as generateApiKeyAction, + listApiKeys as listApiKeysAction, resetApiKey as resetApiKeyAction, } from "../../actions/user"; import { useDispatch, useSelector } from "react-redux"; @@ -19,30 +19,43 @@ import { RootState } from "../../types/data"; import { ClowderMetadataTextField } from "../styledComponents/ClowderMetadataTextField"; import { ClowderFootnote } from "../styledComponents/ClowderFootnote"; import { CopyToClipboard } from "react-copy-to-clipboard"; +import { ClowderInputLabel } from "../styledComponents/ClowderInputLabel"; +import { ClowderInput } from "../styledComponents/ClowderInput"; type ApiKeyModalProps = { + skip: number | undefined; + limit: number; apiKeyModalOpen: boolean; setApiKeyModalOpen: any; }; -export const ApiKeyModal = (props: ApiKeyModalProps) => { - const { apiKeyModalOpen, setApiKeyModalOpen } = props; +export const CreateApiKeyModal = (props: ApiKeyModalProps) => { + const { skip, limit, apiKeyModalOpen, setApiKeyModalOpen } = props; const dispatch = useDispatch(); - const generateApiKey = (minutes: number) => - dispatch(generateApiKeyAction(minutes)); + const generateApiKey = (name: string, minutes: number) => + dispatch(generateApiKeyAction(name, minutes)); + const listApiKeys = (skip: number | undefined, limit: number | undefined) => + dispatch(listApiKeysAction(skip, limit)); const resetApiKey = () => dispatch(resetApiKeyAction()); - const apiKey = useSelector((state: RootState) => state.user.apiKey); + const hashedKey = useSelector((state: RootState) => state.user.hashedKey); + const [name, setName] = useState(""); const [minutes, setMinutes] = useState(30); const handleClose = () => { - resetApiKey(); setApiKeyModalOpen(false); + + // fetch latest api key list + listApiKeys(skip, limit); + resetApiKey(); + // reset + setName(""); + setMinutes(30); }; const handleGenerate = () => { - generateApiKey(minutes); + generateApiKey(name, minutes); }; const handleExpirationChange = (e) => { @@ -52,7 +65,7 @@ export const ApiKeyModal = (props: ApiKeyModalProps) => { return ( Your API Key - {apiKey ? ( + {hashedKey ? ( <> @@ -60,13 +73,13 @@ export const ApiKeyModal = (props: ApiKeyModalProps) => { this again. - + @@ -75,14 +88,26 @@ export const ApiKeyModal = (props: ApiKeyModalProps) => { ) : ( <> - Your API key will expire - After + Name + { + setName(event.target.value); + }} + defaultValue={name} + /> + + + + Expire after + + Owner + Editor + Uploader + Viewer + + + ) : ( + selectedRole + )} + {/*only owner or editor are allowed to modify roles of the member*/} + + {editRoleOn ? 
( + + + + + + + + + ) : ( + { + setEditRoleOn(true); + }} + > + + + )} + + + + + + + + + + + + + ); +} diff --git a/frontend/src/components/sharing/SharingTab.tsx b/frontend/src/components/sharing/SharingTab.tsx index 3aa907a4a..999346c69 100644 --- a/frontend/src/components/sharing/SharingTab.tsx +++ b/frontend/src/components/sharing/SharingTab.tsx @@ -1,73 +1,114 @@ -import React, {useEffect, useState} from "react"; -import {RootState} from "../../types/data"; -import Card from '@mui/material/Card'; -import {fetchDatasetGroupsAndRoles, fetchDatasetUsersAndRoles} from "../../actions/dataset"; -import {useDispatch, useSelector} from "react-redux"; -import {useParams} from "react-router-dom"; -import TableContainer from "@mui/material/TableContainer"; -import Paper from "@mui/material/Paper"; -import Table from "@mui/material/Table"; -import TableHead from "@mui/material/TableHead"; -import TableRow from "@mui/material/TableRow"; -import TableCell from "@mui/material/TableCell"; -import TableBody from "@mui/material/TableBody"; -import {GroupAndRoleTable} from "./GroupAndRoleTable"; -import {UserAndRoleTable} from "./UserAndRoleTable"; -import {CardContent} from "@mui/material"; - +import React, { useEffect, useState } from "react"; +import { RootState } from "../../types/data"; +import Card from "@mui/material/Card"; +import { fetchDatasetRoles } from "../../actions/dataset"; +import { useDispatch, useSelector } from "react-redux"; +import { useParams } from "react-router-dom"; +import { GroupAndRoleTable } from "./GroupAndRoleTable"; +import { UserAndRoleTable } from "./UserAndRoleTable"; +import { Box, CardContent } from "@mui/material"; +import Typography from "@mui/material/Typography"; export const SharingTab = (): JSX.Element => { - - const {datasetId} = useParams<{ datasetId?: string }>(); + const { datasetId } = useParams<{ datasetId?: string }>(); const dispatch = useDispatch(); - const getUsersAndRoles = (datasetId: string | undefined) => dispatch(fetchDatasetUsersAndRoles(datasetId)); - const getGroupsAndRoles = (datasetId: string | undefined) => dispatch(fetchDatasetGroupsAndRoles(datasetId)); - const datasetUsersAndRolesList = useSelector((state: RootState) => state.dataset.usersAndRoles); - const datasetGroupsAndRolesList = useSelector((state: RootState) => state.dataset.groupsAndRoles); + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); + const datasetRolesList = useSelector( + (state: RootState) => state.dataset.roles + ); const [sharePaneOpen, setSharePaneOpen] = useState(false); const handleShareClose = () => { - setSharePaneOpen(false); - } - - + setSharePaneOpen(false); + }; useEffect(() => { - getUsersAndRoles(datasetId); - console.log('users and roles', datasetUsersAndRolesList); - + getRoles(datasetId); }, []); - useEffect(() => { - getGroupsAndRoles(datasetId); - console.log('groups and roles', datasetGroupsAndRolesList); - }, []); - - const clickButton = () => { // reset error message and close the error window - console.log('change role now'); - } + console.log("change role now"); + }; return ( <> -

Users and Roles

- + + + + + {"Users"} + + + + + - + -

Groups and Roles

- +
+ + + + + + {"Groups"} + + + + + - + - - ) - -} + ); +}; diff --git a/frontend/src/components/sharing/UserAndRoleTable.tsx b/frontend/src/components/sharing/UserAndRoleTable.tsx index 4f12d06a6..f3a80e730 100644 --- a/frontend/src/components/sharing/UserAndRoleTable.tsx +++ b/frontend/src/components/sharing/UserAndRoleTable.tsx @@ -1,85 +1,53 @@ -import React, {useEffect, useState} from "react"; -import {RootState} from "../../types/data"; -import {fetchDatasetGroupsAndRoles, fetchDatasetUsersAndRoles} from "../../actions/dataset"; -import {useDispatch, useSelector} from "react-redux"; -import {useParams} from "react-router-dom"; +import React from "react"; +import { RootState } from "../../types/data"; +import { useDispatch, useSelector } from "react-redux"; +import { useParams } from "react-router-dom"; import TableContainer from "@mui/material/TableContainer"; import Table from "@mui/material/Table"; import TableHead from "@mui/material/TableHead"; import TableRow from "@mui/material/TableRow"; import TableCell from "@mui/material/TableCell"; import TableBody from "@mui/material/TableBody"; -import ChangeDatasetRoleModal from "../datasets/ChangeDatasetRoleModal"; +import { theme } from "../../theme"; +import { UserAndRoleTableEntry } from "./UserAndRoleTableEntry"; +const iconStyle = { + verticalAlign: "middle", + color: theme.palette.primary.main, +}; export const UserAndRoleTable = (): JSX.Element => { - - const {datasetId} = useParams<{ datasetId?: string }>(); + const { datasetId } = useParams<{ datasetId?: string }>(); const dispatch = useDispatch(); - const getUsersAndRoles = (datasetId: string | undefined) => dispatch(fetchDatasetUsersAndRoles(datasetId)); - const datasetUsersAndRolesList = useSelector((state: RootState) => state.dataset.usersAndRoles); - const [sharePaneOpen, setSharePaneOpen] = useState(false); - - const handleShareClose = () => { - setSharePaneOpen(false); - } - - - - useEffect(() => { - getUsersAndRoles(datasetId); - console.log('users and roles', datasetUsersAndRolesList); - - }, []); - - - function clickButton(currentUserId, currentUserRole) { - // reset error message and close the error window - console.log(currentUserId, currentUserRole); - console.log('change role now for user'); - setSharePaneOpen(true); - } + const datasetRolesList = useSelector( + (state: RootState) => state.dataset.roles + ); return ( - <> -
- - - - - ID - Name - Role - Change Role - - - - { - datasetUsersAndRolesList.map((user_role) => ( - - {user_role.user_id} - {user_role.roleType} - {user_role.roleType} - - - - - )) - } - -
-
-
- - ) - -} + + + + + Name + Email + Role + + + + + {datasetRolesList !== undefined && + datasetRolesList.user_roles !== undefined ? ( + datasetRolesList.user_roles.map((user_role) => ( + // If user is in a group, omit from this table + + )) + ) : ( + <> + )} + +
+
+ ); +}; diff --git a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx new file mode 100644 index 000000000..2bd06c85c --- /dev/null +++ b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx @@ -0,0 +1,152 @@ +import React, { useState } from "react"; +import { RootState } from "../../types/data"; +import { useDispatch, useSelector } from "react-redux"; +import TableRow from "@mui/material/TableRow"; +import TableCell from "@mui/material/TableCell"; +import Gravatar from "react-gravatar"; +import PersonIcon from "@mui/icons-material/Person"; +import { + Button, + ButtonGroup, + FormControl, + IconButton, + InputLabel, + MenuItem, + Select, +} from "@mui/material"; +import { AuthWrapper } from "../auth/AuthWrapper"; +import CheckIcon from "@mui/icons-material/Check"; +import CloseIcon from "@mui/icons-material/Close"; +import EditIcon from "@mui/icons-material/Edit"; +import { theme } from "../../theme"; +import { UserAndRole } from "../../openapi/v2"; +import { setDatasetUserRole } from "../../actions/dataset"; +import { useParams } from "react-router-dom"; + +type UserAndRoleTableEntryProps = { + user_role: UserAndRole; +}; + +const iconStyle = { + verticalAlign: "middle", + color: theme.palette.primary.main, +}; + +export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { + const { user_role } = props; + const { datasetId } = useParams<{ datasetId?: string }>(); + + const dispatch = useDispatch(); + const datasetRole = useSelector( + (state: RootState) => state.dataset.datasetRole + ); + + const userRoleAssigned = ( + dataset_id: string | undefined, + username: string | undefined, + role: string | undefined + ) => dispatch(setDatasetUserRole(dataset_id, username, role)); + + const [selectedRole, setSelectedRole] = useState(user_role.role); + const [editRoleOn, setEditRoleOn] = useState(false); + + const handleRoleSelection = (e) => { + setSelectedRole(e.target.value); + }; + + // Resume to the current state in redux + const handleRoleCancel = () => { + setSelectedRole(selectedRole); + setEditRoleOn(false); + }; + + const handleRoleSave = () => { + userRoleAssigned(datasetId, user_role.user.email, selectedRole); + setEditRoleOn(false); + }; + + return ( + + + {user_role.user && user_role.user.email ? ( + + ) : ( + + )} + + + {user_role.user.email} + + {editRoleOn ? ( + + Role + + + ) : ( + selectedRole + )} + {/*only owner or editor are allowed to modify roles of the member*/} + + {editRoleOn ? 
( + + + + + + + + + ) : ( + { + setEditRoleOn(true); + }} + > + + + )} + + + + + ); +} diff --git a/frontend/src/openapi/v2/index.ts b/frontend/src/openapi/v2/index.ts index fb6759aae..ca8c05754 100644 --- a/frontend/src/openapi/v2/index.ts +++ b/frontend/src/openapi/v2/index.ts @@ -17,6 +17,7 @@ export type { DatasetBase } from './models/DatasetBase'; export type { DatasetIn } from './models/DatasetIn'; export type { DatasetOut } from './models/DatasetOut'; export type { DatasetPatch } from './models/DatasetPatch'; +export type { DatasetRoles } from './models/DatasetRoles'; export type { EventListenerIn } from './models/EventListenerIn'; export type { EventListenerJob } from './models/EventListenerJob'; export type { EventListenerOut } from './models/EventListenerOut'; diff --git a/frontend/src/openapi/v2/models/DatasetRoles.ts b/frontend/src/openapi/v2/models/DatasetRoles.ts new file mode 100644 index 000000000..f4d4c601b --- /dev/null +++ b/frontend/src/openapi/v2/models/DatasetRoles.ts @@ -0,0 +1,13 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { GroupAndRole } from './GroupAndRole'; +import type { UserAndRole } from './UserAndRole'; + +export type DatasetRoles = { + id?: string; + dataset_id: string; + user_roles?: Array; + group_roles?: Array; +} diff --git a/frontend/src/openapi/v2/models/GroupAndRole.ts b/frontend/src/openapi/v2/models/GroupAndRole.ts index 601164801..bc712516b 100644 --- a/frontend/src/openapi/v2/models/GroupAndRole.ts +++ b/frontend/src/openapi/v2/models/GroupAndRole.ts @@ -2,8 +2,10 @@ /* tslint:disable */ /* eslint-disable */ +import type { GroupOut } from './GroupOut'; +import type { RoleType } from './RoleType'; + export type GroupAndRole = { - group_id: string; - group_name: string; - roleType: string; + group: GroupOut; + role: RoleType; } diff --git a/frontend/src/openapi/v2/models/UserAndRole.ts b/frontend/src/openapi/v2/models/UserAndRole.ts index 7e8508e0d..21376a759 100644 --- a/frontend/src/openapi/v2/models/UserAndRole.ts +++ b/frontend/src/openapi/v2/models/UserAndRole.ts @@ -2,7 +2,10 @@ /* tslint:disable */ /* eslint-disable */ +import type { RoleType } from './RoleType'; +import type { UserOut } from './UserOut'; + export type UserAndRole = { - user_id: string; - roleType: string; + user: UserOut; + role: RoleType; } diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index 159c20de4..6eb0966ff 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -4,9 +4,8 @@ import type { AuthorizationBase } from '../models/AuthorizationBase'; import type { AuthorizationDB } from '../models/AuthorizationDB'; import type { AuthorizationMetadata } from '../models/AuthorizationMetadata'; -import type { GroupAndRole } from '../models/GroupAndRole'; +import type { DatasetRoles } from '../models/DatasetRoles'; import type { RoleType } from '../models/RoleType'; -import type { UserAndRole } from '../models/UserAndRole'; import type { CancelablePromise } from '../core/CancelablePromise'; import { request as __request } from '../core/request'; @@ -240,38 +239,18 @@ export class AuthorizationService { } /** - * Get Dataset Users And Roles - * Returns a list of UserAndRole objects. These show what users have what permission on a dataset + * Get Dataset Roles + * Get a list of all users and groups that have assigned roles on this dataset. 
* @param datasetId - * @returns UserAndRole Successful Response + * @returns DatasetRoles Successful Response * @throws ApiError */ - public static getDatasetUsersAndRolesApiV2AuthorizationsDatasetsDatasetIdUsersAndRolesGet( + public static getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( datasetId: string, - ): CancelablePromise> { + ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/datasets/${datasetId}/users_and_roles`, - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Get Dataset Groups And Roles - * Returns a list of Group objects. These show what groups have what permission on a dataset Group and - * role has the id, name, and roleType - * @param datasetId - * @returns GroupAndRole Successful Response - * @throws ApiError - */ - public static getDatasetGroupsAndRolesApiV2AuthorizationsDatasetsDatasetIdGroupsAndRolesGet( - datasetId: string, - ): CancelablePromise> { - return __request({ - method: 'GET', - path: `/api/v2/authorizations/datasets/${datasetId}/groups_and_roles`, + path: `/api/v2/authorizations/datasets/${datasetId}/roles`, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/reducers/dataset.ts b/frontend/src/reducers/dataset.ts index ca9a6b1b0..6a1bf8390 100644 --- a/frontend/src/reducers/dataset.ts +++ b/frontend/src/reducers/dataset.ts @@ -1,87 +1,95 @@ import { - RECEIVE_FILES_IN_DATASET, + CREATE_DATASET, + DELETE_DATASET, RECEIVE_DATASET_ABOUT, - UPDATE_DATASET, + RECEIVE_DATASET_ROLES, RECEIVE_DATASETS, - DELETE_DATASET, - CREATE_DATASET, + RECEIVE_FILES_IN_DATASET, RESET_CREATE_DATASET, - DOWNLOAD_DATASET, SET_DATASET_GROUP_ROLE, - SET_DATASET_USER_ROLE, - RECEIVE_DATASET_USERS_AND_ROLES, - RECEIVE_DATASET_GROUPS_AND_ROLES, + SET_DATASET_USER_ROLE, + UPDATE_DATASET, } from "../actions/dataset"; -import {CREATE_FILE, UPDATE_FILE, DELETE_FILE, RESET_CREATE_FILE} from "../actions/file"; -import {RECEIVE_DATASET_ROLE, -} from "../actions/authorization"; -import {DataAction} from "../types/action"; -import {Author, Dataset, DatasetState} from "../types/data"; -import {AuthorizationBase, GroupAndRole, UserAndRole, FileOut as File} from "../openapi/v2"; +import { + CREATE_FILE, + DELETE_FILE, + RESET_CREATE_FILE, + UPDATE_FILE, +} from "../actions/file"; +import { RECEIVE_DATASET_ROLE } from "../actions/authorization"; +import { DataAction } from "../types/action"; +import { Author, Dataset, DatasetState } from "../types/data"; +import { + AuthorizationBase, + DatasetRoles, + FileOut as File, +} from "../openapi/v2"; const defaultState: DatasetState = { files: [], - about: {"author":{}}, + about: { author: {} }, datasetRole: {}, datasets: [], newDataset: {}, newFile: {}, - groupsAndRoles: [], - usersAndRoles: [], + roles: {}, }; const dataset = (state = defaultState, action: DataAction) => { switch (action.type) { - case RECEIVE_FILES_IN_DATASET: - return Object.assign({}, state, {files: action.files}); - case DELETE_FILE: - return Object.assign({}, state, { - files: state.files.filter(file => file.id !== action.file.id), - }); - // TODO rethink the pattern for file creation - // case CREATE_FILE: - // return Object.assign({}, state, { - // files: [...state.files, action.file] - // }); - case CREATE_FILE: - return Object.assign({}, state, { - newFile: action.file - }); - case RESET_CREATE_FILE: - return Object.assign({}, state, {newFile: {}}) - case SET_DATASET_GROUP_ROLE: - return Object.assign({}, state, {}) - case SET_DATASET_USER_ROLE: - return Object.assign({}, state, {}) - case 
UPDATE_FILE: - return Object.assign({}, state, { - files: state.files.map(file => file.id === action.file.id ? action.file: file), - }); - case RECEIVE_DATASET_ABOUT: - return Object.assign({}, state, {about: action.about}); - case RECEIVE_DATASET_ROLE: - return Object.assign({}, state, {datasetRole: action.role}); - case RECEIVE_DATASET_GROUPS_AND_ROLES: - return Object.assign({}, state, {groupsAndRoles: action.groupsAndRoles}); - case RECEIVE_DATASET_USERS_AND_ROLES: - return Object.assign({}, state, {usersAndRoles: action.usersAndRoles}); - case UPDATE_DATASET: - return Object.assign({}, state, {about: action.about}); - case RECEIVE_DATASETS: - return Object.assign({}, state, {datasets: action.datasets}); - case CREATE_DATASET: - return Object.assign({}, state, {newDataset: action.dataset}); - case RESET_CREATE_DATASET: - return Object.assign({}, state, {newDataset: {}}); - case DELETE_DATASET: - return Object.assign({}, state, { - datasets: state.datasets.filter(dataset => dataset.id !== action.dataset.id), - }); - // case DOWNLOAD_DATASET: - // // TODO do nothing for now; but in the future can utilize to display certain effects - // return Object.assign({}, state, {}); - default: - return state; + case RECEIVE_FILES_IN_DATASET: + return Object.assign({}, state, { files: action.files }); + case DELETE_FILE: + return Object.assign({}, state, { + files: state.files.filter((file) => file.id !== action.file.id), + }); + // TODO rethink the pattern for file creation + // case CREATE_FILE: + // return Object.assign({}, state, { + // files: [...state.files, action.file] + // }); + case CREATE_FILE: + return Object.assign({}, state, { + newFile: action.file, + }); + case RESET_CREATE_FILE: + return Object.assign({}, state, { newFile: {} }); + case SET_DATASET_GROUP_ROLE: + return Object.assign({}, state, {}); + case SET_DATASET_USER_ROLE: + return Object.assign({}, state, {}); + case UPDATE_FILE: + return Object.assign({}, state, { + files: state.files.map((file) => + file.id === action.file.id ? 
action.file : file
+				),
+			});
+		case RECEIVE_DATASET_ABOUT:
+			return Object.assign({}, state, { about: action.about });
+		case RECEIVE_DATASET_ROLE:
+			return Object.assign({}, state, { datasetRole: action.role });
+		case RECEIVE_DATASET_ROLES:
+			return Object.assign({}, state, { roles: action.roles });
+		case UPDATE_DATASET:
+			return Object.assign({}, state, { about: action.about });
+		case RECEIVE_DATASETS:
+			return Object.assign({}, state, { datasets: action.datasets });
+		case CREATE_DATASET:
+			return Object.assign({}, state, { newDataset: action.dataset });
+		case RESET_CREATE_DATASET:
+			return Object.assign({}, state, { newDataset: {} });
+		case DELETE_DATASET:
+			return Object.assign({}, state, {
+				datasets: state.datasets.filter(
+					(dataset) => dataset.id !== action.dataset.id
+				),
+			});
+		// case DOWNLOAD_DATASET:
+		// 	// TODO do nothing for now; but in the future can utilize to display certain effects
+		// 	return Object.assign({}, state, {});
+		default:
+			return state;
 	}
 };
diff --git a/frontend/src/types/action.ts b/frontend/src/types/action.ts
index ab3164623..9579e8c6a 100644
--- a/frontend/src/types/action.ts
+++ b/frontend/src/types/action.ts
@@ -7,14 +7,13 @@ import {
 } from "./data";
 import {
 	AuthorizationBase,
+	DatasetRoles,
 	FileOut as FileSummary,
 	FileVersion,
-	GroupAndRole,
 	GroupOut as Group,
 	MetadataDefinitionOut as MetadataDefinition,
 	MetadataOut as Metadata,
 	RoleType,
-	UserAndRole,
 	UserAPIKeyOut,
 	UserOut,
 } from "../openapi/v2";
@@ -41,14 +40,9 @@ interface RECEIVE_DATASET_ROLE {
 	type: "RECEIVE_DATASET_ROLE";
 }

-interface RECEIVE_DATASET_GROUPS_AND_ROLES {
-	groupsAndRoles: GroupAndRole[];
-	type: "RECEIVE_DATASET_GROUPS_AND_ROLES";
-}
-
-interface RECEIVE_DATASET_USERS_AND_ROLES {
-	usersAndRoles: UserAndRole[];
-	type: "RECEIVE_DATASET_USERS_AND_ROLES";
+interface RECEIVE_DATASET_ROLES {
+	roles: DatasetRoles;
+	type: "RECEIVE_DATASET_ROLES";
 }

 interface RECEIVE_FILE_ROLE {
@@ -421,5 +415,4 @@ export type DataAction =
 	| ADD_GROUP_MEMBER
 	| ASSIGN_GROUP_MEMBER_ROLE
 	| LIST_USERS
-	| RECEIVE_DATASET_GROUPS_AND_ROLES
-	| RECEIVE_DATASET_USERS_AND_ROLES;
+	| RECEIVE_DATASET_ROLES;
diff --git a/frontend/src/types/data.ts b/frontend/src/types/data.ts
index 742111dc4..bbbffbff5 100644
--- a/frontend/src/types/data.ts
+++ b/frontend/src/types/data.ts
@@ -1,15 +1,14 @@
 import {
 	AuthorizationBase,
+	DatasetRoles,
 	EventListenerJob,
 	FileOut as FileSummary,
 	FileVersion,
 	FolderOut,
-	GroupAndRole,
 	GroupOut,
 	MetadataDefinitionOut,
 	MetadataOut as Metadata,
 	RoleType,
-	UserAndRole,
 	UserAPIKeyOut,
 	UserOut,
 } from "../openapi/v2";
@@ -145,8 +144,7 @@ export interface DatasetState {
 	newFile: FileSummary;
 	about: Dataset;
 	datasetRole: AuthorizationBase;
-	groupsAndRoles: GroupAndRole[];
-	usersAndRoles: UserAndRole[];
+	roles: DatasetRoles;
 }

 export interface ListenerState {

From c5bd3853f7b9d7ce45f1d09ece626ad40f31d2ef Mon Sep 17 00:00:00 2001
From: Max Burnette
Date: Wed, 26 Apr 2023 13:29:08 -0500
Subject: [PATCH 04/32] Fix reload bug (#451)

---
 backend/app/keycloak_auth.py | 13 +++++++------
 backend/app/routers/datasets.py | 9 ++++-----
 backend/app/routers/keycloak.py | 14 +++++---------
 .../src/openapi/v2/services/DatasetsService.ts | 4 ++--
 4 files changed, 18 insertions(+), 22 deletions(-)

diff --git a/backend/app/keycloak_auth.py b/backend/app/keycloak_auth.py
index 8cd929112..75ef3af30 100644
--- a/backend/app/keycloak_auth.py
+++ b/backend/app/keycloak_auth.py
@@ -2,17 +2,18 @@ import json
 import logging
 from 
datetime import datetime + from bson import ObjectId +from fastapi import Security, HTTPException, Depends from fastapi.security import OAuth2AuthorizationCodeBearer, APIKeyHeader -from jose import ExpiredSignatureError, jwt -from keycloak.keycloak_openid import KeycloakOpenID +from itsdangerous.exc import BadSignature +from itsdangerous.url_safe import URLSafeSerializer +from jose import ExpiredSignatureError from keycloak.exceptions import KeycloakAuthenticationError, KeycloakGetError from keycloak.keycloak_admin import KeycloakAdmin -from pymongo import MongoClient -from fastapi import Security, HTTPException, status, Depends +from keycloak.keycloak_openid import KeycloakOpenID from pydantic import Json -from itsdangerous.url_safe import URLSafeSerializer -from itsdangerous.exc import BadSignature +from pymongo import MongoClient from . import dependencies from .config import settings diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 7f1718b76..f03d00a7f 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -7,6 +7,7 @@ import zipfile from collections.abc import Mapping, Iterable from typing import List, Optional, Union + from bson import ObjectId from bson import json_util from elasticsearch import Elasticsearch @@ -26,10 +27,11 @@ from app import dependencies from app import keycloak_auth -from app.deps.authorization_deps import Authorization from app.config import settings +from app.deps.authorization_deps import Authorization from app.keycloak_auth import get_token from app.keycloak_auth import get_user, get_current_user +from app.models.authorization import AuthorizationDB, RoleType from app.models.datasets import ( DatasetBase, DatasetIn, @@ -43,15 +45,12 @@ from app.models.users import UserOut from app.rabbitmq.listeners import submit_dataset_job from app.routers.files import add_file_entry, remove_file_entry - from app.search.connect import ( - connect_elasticsearch, insert_record, delete_document_by_id, delete_document_by_query, update_record, ) -from app.models.authorization import AuthorizationDB, RoleType router = APIRouter() security = HTTPBearer() @@ -289,7 +288,7 @@ async def get_dataset( raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") -@router.get("/{dataset_id}/files") +@router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( dataset_id: str, folder_id: Optional[str] = None, diff --git a/backend/app/routers/keycloak.py b/backend/app/routers/keycloak.py index a3bddbcdf..c3390c98e 100644 --- a/backend/app/routers/keycloak.py +++ b/backend/app/routers/keycloak.py @@ -1,28 +1,24 @@ import json +import logging import requests from bson import ObjectId from fastapi import APIRouter, HTTPException, Depends, Security from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer -from jose import jwt, ExpiredSignatureError +from jose import jwt, ExpiredSignatureError, JWTError from keycloak.exceptions import KeycloakAuthenticationError, KeycloakGetError -from pydantic import Json from pymongo import MongoClient -from starlette import status from starlette.responses import RedirectResponse -from app import keycloak_auth, dependencies +from app import dependencies from app.config import settings from app.keycloak_auth import ( keycloak_openid, - get_token, - oauth2_scheme, get_idp_public_key, retreive_refresh_token, ) -from app.models.users import UserIn, UserDB from app.models.tokens import TokenDB -import logging +from app.models.users 
import UserIn, UserDB router = APIRouter() security = HTTPBearer() @@ -170,7 +166,7 @@ async def refresh_token( ) email = token_json["email"] return await retreive_refresh_token(email, db) - except ExpiredSignatureError: + except (ExpiredSignatureError, JWTError): # retreive the refresh token and try refresh email = jwt.get_unverified_claims(access_token)["email"] return await retreive_refresh_token(email, db) diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 6f201f59f..fdb35ea11 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -148,7 +148,7 @@ export class DatasetsService { * @param folderId * @param skip * @param limit - * @returns any Successful Response + * @returns FileOut Successful Response * @throws ApiError */ public static getDatasetFilesApiV2DatasetsDatasetIdFilesGet( @@ -156,7 +156,7 @@ export class DatasetsService { folderId?: string, skip?: number, limit: number = 10, - ): CancelablePromise { + ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/files`, From a612602a2623a50b5df36404719cfa94d531ff52 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 26 Apr 2023 13:29:22 -0500 Subject: [PATCH 05/32] 461 fix library version in pipfile (#468) * fix pipfile version * regenerate the piplock file --- backend/Pipfile | 54 ++++++------ backend/Pipfile.lock | 190 +++++++++++++++++++++---------------------- 2 files changed, 122 insertions(+), 122 deletions(-) diff --git a/backend/Pipfile b/backend/Pipfile index 106882cde..e9360b975 100644 --- a/backend/Pipfile +++ b/backend/Pipfile @@ -4,35 +4,35 @@ verify_ssl = true name = "pypi" [packages] -fastapi = "*" -uvicorn = "*" -motor = "*" -mongoengine = "*" -beanie = "*" -passlib = "*" -bcrypt = "*" -pyjwt = "*" -minio = "*" -python-multipart = "*" -pipfile = "*" -email-validator = "*" -python-keycloak = "*" -pika = "*" -aio-pika = "*" -elasticsearch = "*" -pipenv = "*" -install = "*" -rocrate = "*" -httpx = "*" -packaging = "*" -itsdangerous = "*" +fastapi = "0.95.1" +uvicorn = "0.21.1" +motor = "3.1.2" +mongoengine = "0.27.0" +beanie = "1.18.0" +passlib = "1.7.4" +bcrypt = "4.0.1" +pyjwt = "2.6.0" +minio = "7.1.14" +python-multipart = "0.0.6" +pipfile = "0.0.2" +email-validator = "2.0.0.post2" +python-keycloak = "2.15.3" +pika = "1.3.1" +aio-pika = "9.0.5" +elasticsearch = "8.7.0" +pipenv = "2023.4.20" +install = "1.3.5" +rocrate = "0.7.0" +httpx = "0.24.0" +packaging = "23.1" +itsdangerous = "2.1.2" [dev-packages] -requests = "*" -pytest = "*" -pytest-asyncio = "*" -black = "*" -faker = "*" +requests = "2.28.2" +pytest = "7.3.1" +pytest-asyncio = "0.21.0" +black = "23.3.0" +faker = "18.4.0" [requires] python_version = "3.9" diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock index 0a84f4a25..00417cc2a 100644 --- a/backend/Pipfile.lock +++ b/backend/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "4716ab5f91b78728399e2b2400d982e869fa38f0d2fcbb67b9fda7d34132fcad" + "sha256": "2939fda7f5fcd6bc893b250832e44b83e449dffe42d33b5f7ad10bebf6c7630e" }, "pipfile-spec": 6, "requires": { @@ -225,7 +225,7 @@ "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version >= '3.7.0'", "version": "==3.1.0" }, "click": { @@ -317,7 +317,7 @@ "hashes": [ 
"sha256:b6558272656e6f279948ee76d9863b4c00f467ad59b2d1190ca2304e514f7ce9" ], - "markers": "python_version >= '3.5' and python_version < '4.0'", + "markers": "python_version >= '3.5' and python_version < '4'", "version": "==0.1.4" }, "gxformat2": { @@ -640,11 +640,11 @@ }, "platformdirs": { "hashes": [ - "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08", - "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e" + "sha256:64370d47dc3fca65b4879f89bdead8197e93e05d696d6d1816243ebae8595da5", + "sha256:ea61fd7b85554beecbbd3e9b37fb26689b227ffae38f73353cbcc1cf8bd01878" ], "markers": "python_version >= '3.7'", - "version": "==3.2.0" + "version": "==3.3.0" }, "pyasn1": { "hashes": [ @@ -882,7 +882,7 @@ "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" ], - "markers": "python_version >= '3.7' and python_version < '4.0'", + "markers": "python_version >= '3.7' and python_version < '4'", "version": "==2.28.2" }, "requests-toolbelt": { @@ -906,7 +906,7 @@ "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" ], - "markers": "python_version >= '3.6' and python_version < '4.0'", + "markers": "python_version >= '3.6' and python_version < '4'", "version": "==4.9" }, "ruamel.yaml": { @@ -914,7 +914,7 @@ "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7", "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af" ], - "markers": "python_version >= '3.7'", + "markers": "python_version >= '3'", "version": "==0.17.21" }, "ruamel.yaml.clib": { @@ -1009,11 +1009,11 @@ }, "setuptools": { "hashes": [ - "sha256:6f0839fbdb7e3cfef1fc38d7954f5c1c26bf4eebb155a55c9bf8faf997b9fb67", - "sha256:bb16732e8eb928922eabaa022f881ae2b7cdcfaf9993ef1f5e841a96d32b8e0c" + "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b", + "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990" ], "markers": "python_version >= '3.7'", - "version": "==67.7.1" + "version": "==67.7.2" }, "six": { "hashes": [ @@ -1103,83 +1103,83 @@ }, "yarl": { "hashes": [ - "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87", - "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89", - "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a", - "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08", - "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996", - "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077", - "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901", - "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e", - "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee", - "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574", - "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165", - "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634", - "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229", - "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b", - "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f", - "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7", - "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf", - 
"sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89", - "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0", - "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1", - "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe", - "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf", - "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76", - "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951", - "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863", - "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06", - "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562", - "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6", - "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c", - "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e", - "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1", - "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3", - "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3", - "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778", - "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8", - "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2", - "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b", - "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d", - "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f", - "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c", - "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581", - "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918", - "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c", - "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e", - "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220", - "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37", - "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739", - "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77", - "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6", - "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42", - "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946", - "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5", - "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d", - "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146", - "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a", - "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83", - "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef", - "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80", - "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588", - "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5", - "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2", - "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef", - "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826", - 
"sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05", - "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516", - "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0", - "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4", - "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2", - "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0", - "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd", - "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8", - "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b", - "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1", - "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c" + "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571", + "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3", + "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3", + "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c", + "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7", + "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04", + "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191", + "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea", + "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4", + "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4", + "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095", + "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e", + "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74", + "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef", + "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33", + "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde", + "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45", + "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf", + "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b", + "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac", + "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0", + "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528", + "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716", + "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb", + "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18", + "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72", + "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6", + "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582", + "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5", + "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368", + "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc", + "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9", + "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be", + "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a", + "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80", + 
"sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8", + "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6", + "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417", + "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574", + "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59", + "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608", + "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82", + "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1", + "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3", + "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d", + "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8", + "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc", + "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac", + "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8", + "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955", + "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0", + "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367", + "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb", + "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a", + "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623", + "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2", + "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6", + "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7", + "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4", + "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051", + "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938", + "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8", + "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9", + "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3", + "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5", + "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9", + "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333", + "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185", + "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3", + "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560", + "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b", + "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7", + "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78", + "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7" ], "markers": "python_version >= '3.7'", - "version": "==1.8.2" + "version": "==1.9.2" } }, "develop": { @@ -1300,7 +1300,7 @@ "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version >= '3.7.0'", "version": "==3.1.0" }, "click": { @@ -1321,11 +1321,11 @@ }, "faker": { "hashes": [ - "sha256:170ead9d0d140916168b142df69c44722b8f622ced2070802d0af9c476f0cb84", - 
"sha256:977ad0b7aa7a61ed57287d6a0723a827e9d3dd1f8cc82aaf08707f281b33bacc" + "sha256:137c6667583b0b458599b11305eed5a486e3932a14cb792b2b5b82ad1ad1a430", + "sha256:64e9ab619d75684cc0593aa9f336170b0b58fa77c07fc0ebc7b2b1258e53b67d" ], "index": "pypi", - "version": "==18.4.0" + "version": "==18.5.1" }, "idna": { "hashes": [ @@ -1369,11 +1369,11 @@ }, "platformdirs": { "hashes": [ - "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08", - "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e" + "sha256:64370d47dc3fca65b4879f89bdead8197e93e05d696d6d1816243ebae8595da5", + "sha256:ea61fd7b85554beecbbd3e9b37fb26689b227ffae38f73353cbcc1cf8bd01878" ], "markers": "python_version >= '3.7'", - "version": "==3.2.0" + "version": "==3.3.0" }, "pluggy": { "hashes": [ @@ -1412,7 +1412,7 @@ "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" ], - "markers": "python_version >= '3.7' and python_version < '4.0'", + "markers": "python_version >= '3.7' and python_version < '4'", "version": "==2.28.2" }, "six": { From 931c4c4876c9ddbeb65f013ec6f922caedd29626 Mon Sep 17 00:00:00 2001 From: Aruna Parameswaran Date: Thu, 27 Apr 2023 10:38:26 -0500 Subject: [PATCH 06/32] 443 frontend need to display more verbose backend error (#458) * Updated frontend error message to be more verbose * Backend returns verbose logs, frontend displays it as generic messages depending on the HTTP status code * Updated the error message logic to include original backend logs wherever possible * Fixed report button issue * Rootcause: The onClick event for the report button has a semantic error, which is causing the issue reason string to be replaced with an object * Also added encode for the reason * Fixing issue with error message * Revert "Updated frontend error message to be more verbose" This reverts commit 7d230d3dd46738a2672901a03729343f944c2784. * Fixing issue with error message --- frontend/src/actions/common.js | 6 +++--- frontend/src/components/Explore.tsx | 6 ++++-- frontend/src/components/datasets/CreateDataset.tsx | 6 ++++-- frontend/src/components/datasets/Dataset.tsx | 4 ++-- frontend/src/components/files/File.tsx | 4 ++-- frontend/src/components/listeners/ExtractionHistory.tsx | 6 ++++-- 6 files changed, 19 insertions(+), 13 deletions(-) diff --git a/frontend/src/actions/common.js b/frontend/src/actions/common.js index dcbe6bbe9..6a5435bf1 100644 --- a/frontend/src/actions/common.js +++ b/frontend/src/actions/common.js @@ -64,7 +64,7 @@ export function handleErrors(reason, originalFunc){ return (dispatch) => { dispatch({ type: NOT_AUTHORIZED, - reason: "Forbidden", + reason: reason.body.detail, stack: reason.stack ? reason.stack : "", receivedAt: Date.now() }); @@ -73,7 +73,7 @@ export function handleErrors(reason, originalFunc){ return (dispatch) => { dispatch({ type: NOT_FOUND, - reason: "Not Found", + reason: reason.body.detail, stack: reason.stack ? reason.stack : "", receivedAt: Date.now() }); @@ -82,7 +82,7 @@ export function handleErrors(reason, originalFunc){ return (dispatch) => { dispatch({ type: FAILED, - reason: reason.message !== undefined? reason.message : "Backend Failure. Couldn't fetch!", + reason: reason.body.detail !== undefined? reason.body.detail : "Backend Failure. Couldn't fetch!", stack: reason.stack ? 
reason.stack : "", receivedAt: Date.now() }); diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index 9b9fb53d0..600a887d3 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -63,8 +63,10 @@ export const Explore = (): JSX.Element => { setErrorOpen(false); } const handleErrorReport = () => { - window.open(`${config.GHIssueBaseURL}+${reason}&body=${encodeURIComponent(stack)}`); - } + window.open( + `${config.GHIssueBaseURL}+${encodeURIComponent(reason)}&body=${encodeURIComponent(stack)}` + ); + }; // fetch thumbnails from each individual dataset/id calls useEffect(() => { diff --git a/frontend/src/components/datasets/CreateDataset.tsx b/frontend/src/components/datasets/CreateDataset.tsx index 9f0fb70d3..226cc6bdf 100644 --- a/frontend/src/components/datasets/CreateDataset.tsx +++ b/frontend/src/components/datasets/CreateDataset.tsx @@ -51,8 +51,10 @@ export const CreateDataset = (): JSX.Element => { setErrorOpen(false); } const handleErrorReport = () => { - window.open(`${config.GHIssueBaseURL}+${reason}&body=${encodeURIComponent(stack)}`); - } + window.open( + `${config.GHIssueBaseURL}+${encodeURIComponent(reason)}&body=${encodeURIComponent(stack)}` + ); + }; // step 1 const onDatasetSave = (formData:any) =>{ diff --git a/frontend/src/components/datasets/Dataset.tsx b/frontend/src/components/datasets/Dataset.tsx index 04e8049da..ee268e4dd 100644 --- a/frontend/src/components/datasets/Dataset.tsx +++ b/frontend/src/components/datasets/Dataset.tsx @@ -111,9 +111,9 @@ export const Dataset = (): JSX.Element => { dismissError(); setErrorOpen(false); }; - const handleErrorReport = (reason: string) => { + const handleErrorReport = () => { window.open( - `${config.GHIssueBaseURL}+${reason}&body=${encodeURIComponent(stack)}` + `${config.GHIssueBaseURL}+${encodeURIComponent(reason)}&body=${encodeURIComponent(stack)}` ); }; diff --git a/frontend/src/components/files/File.tsx b/frontend/src/components/files/File.tsx index f0578008f..6bca93ea6 100644 --- a/frontend/src/components/files/File.tsx +++ b/frontend/src/components/files/File.tsx @@ -106,9 +106,9 @@ export const File = (): JSX.Element => { dismissError(); setErrorOpen(false); }; - const handleErrorReport = (reason: string) => { + const handleErrorReport = () => { window.open( - `${config.GHIssueBaseURL}+${reason}&body=${encodeURIComponent(stack)}` + `${config.GHIssueBaseURL}+${encodeURIComponent(reason)}&body=${encodeURIComponent(stack)}` ); }; diff --git a/frontend/src/components/listeners/ExtractionHistory.tsx b/frontend/src/components/listeners/ExtractionHistory.tsx index 0c72098f3..9f46490bc 100644 --- a/frontend/src/components/listeners/ExtractionHistory.tsx +++ b/frontend/src/components/listeners/ExtractionHistory.tsx @@ -163,8 +163,10 @@ export const ExtractionHistory = (): JSX.Element => { setErrorOpen(false); } const handleErrorReport = () => { - window.open(`${config.GHIssueBaseURL}+${reason}&body=${encodeURIComponent(stack)}`); - } + window.open( + `${config.GHIssueBaseURL}+${encodeURIComponent(reason)}&body=${encodeURIComponent(stack)}` + ); + }; return ( From e1f8a4a3f22028fc5d22c7dcb8eb18d9553b0d9e Mon Sep 17 00:00:00 2001 From: Todd Nicholson <40038535+tcnichol@users.noreply.github.com> Date: Fri, 28 Apr 2023 09:06:13 -0500 Subject: [PATCH 07/32] 469 create profile page (#471) * profile page exists * new route for user profile profile page includes layout * codegen new route for get current user profile * we now get the profile, but nothing shows yet * 
page has fake data * we see the profile now * formatting --- backend/app/routers/users.py | 10 +++ frontend/src/actions/user.js | 18 ++++++ frontend/src/components/Layout.tsx | 7 +++ frontend/src/components/users/Profile.tsx | 63 ++++++++++++++++++- .../src/openapi/v2/services/UsersService.ts | 12 ++++ frontend/src/reducers/user.ts | 10 ++- frontend/src/routes.tsx | 10 ++- frontend/src/types/action.ts | 11 +++- frontend/src/types/data.ts | 8 +++ 9 files changed, 143 insertions(+), 6 deletions(-) diff --git a/backend/app/routers/users.py b/backend/app/routers/users.py index 478b6b14e..7eb34d6cd 100644 --- a/backend/app/routers/users.py +++ b/backend/app/routers/users.py @@ -104,6 +104,16 @@ async def get_users( return users +@router.get("/profile", response_model=UserOut) +async def get_profile( + username=Depends(get_current_username), + db: MongoClient = Depends(dependencies.get_db), +): + if (user := await db["users"].find_one({"email": username})) is not None: + return UserOut.from_mongo(user) + raise HTTPException(status_code=404, detail=f"User {username} not found") + + @router.get("/{user_id}", response_model=UserOut) async def get_user(user_id: str, db: MongoClient = Depends(dependencies.get_db)): if (user := await db["users"].find_one({"_id": ObjectId(user_id)})) is not None: diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index 8838ae907..e30f05e13 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -196,3 +196,21 @@ export function resetApiKey() { }); }; } + +export const RECEIVE_USER_PROFILE = "RECEIVE_USER_PROFILE"; + +export function fetchUserProfile() { + return (dispatch) => { + return V2.UsersService.getProfileApiV2UsersProfileGet() + .then((json) => { + dispatch({ + type: RECEIVE_USER_PROFILE, + profile: json, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch(handleErrors(reason, fetchUserProfile())); + }); + }; +} diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 0246866ff..ca2ca6fd1 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -222,6 +222,13 @@ export default function PersistentDrawerLeft(props) { open={isMenuOpen} onClose={handleProfileMenuClose} > + + + + + User Profile + + diff --git a/frontend/src/components/users/Profile.tsx b/frontend/src/components/users/Profile.tsx index cb0ff5c3b..c8897896f 100644 --- a/frontend/src/components/users/Profile.tsx +++ b/frontend/src/components/users/Profile.tsx @@ -1 +1,62 @@ -export {}; +import React, { useEffect, useState } from "react"; +import { Box, Button, Grid, Stack, Tab, Tabs, Typography } from "@mui/material"; +import { useParams, useSearchParams } from "react-router-dom"; +import { RootState } from "../../types/data"; +import { useDispatch, useSelector } from "react-redux"; +import Layout from "../Layout"; +import Table from '@mui/material/Table'; +import TableBody from '@mui/material/TableBody'; +import TableCell from '@mui/material/TableCell'; +import TableContainer from '@mui/material/TableContainer'; +import TableHead from '@mui/material/TableHead'; +import TableRow from '@mui/material/TableRow'; +import Paper from '@mui/material/Paper'; +import {fetchUserProfile} from "../../actions/user"; + +export const Profile = (): JSX.Element => { + const dispatch = useDispatch(); + const user = useSelector((state: RootState) => state.user); + const profile = user["profile"]; + const fetchProfile = () => dispatch(fetchUserProfile()); + console.log('user is'); + 
console.log(user); + // component did mount + useEffect(() => { + fetchProfile(); + console.log(user.profile); + }, []); + + console.log('profile is', profile); + if(profile != null) { + return ( + + + + + + Name + Email + Admin + + + + + + {profile.first_name} {profile.last_name} + {profile.email} + {"false"} + + +
+				</Table>
+			</TableContainer>
+		</Layout>
+	)
+	} else {
+		return (
+			<Layout>
+				<div>nothing yet</div>
+			</Layout>
+ ) + } + +} diff --git a/frontend/src/openapi/v2/services/UsersService.ts b/frontend/src/openapi/v2/services/UsersService.ts index a4c3b4f8c..c9d2eb8c8 100644 --- a/frontend/src/openapi/v2/services/UsersService.ts +++ b/frontend/src/openapi/v2/services/UsersService.ts @@ -112,6 +112,18 @@ export class UsersService { }); } + /** + * Get Profile + * @returns UserOut Successful Response + * @throws ApiError + */ + public static getProfileApiV2UsersProfileGet(): CancelablePromise { + return __request({ + method: 'GET', + path: `/api/v2/users/profile`, + }); + } + /** * Get User * @param userId diff --git a/frontend/src/reducers/user.ts b/frontend/src/reducers/user.ts index 34208b230..f379c587f 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -2,13 +2,13 @@ import { DELETE_API_KEY, GENERATE_API_KEY, LIST_API_KEYS, - LOGIN_ERROR, + LOGIN_ERROR, RECEIVE_USER_PROFILE, REGISTER_ERROR, REGISTER_USER, RESET_API_KEY, SET_USER, } from "../actions/user"; -import { UserState } from "../types/data"; +import {Author, Dataset, UserState} from "../types/data"; import { DataAction } from "../types/action"; const defaultState: UserState = { @@ -18,6 +18,7 @@ const defaultState: UserState = { errorMsg: "", hashedKey: "", apiKeys: [], + profile: null, }; const user = (state = defaultState, action: DataAction) => { @@ -27,6 +28,11 @@ const user = (state = defaultState, action: DataAction) => { Authorization: action.Authorization, loginError: false, }); + case RECEIVE_USER_PROFILE: + return Object.assign({}, state, { + profile: action.profile, + loginError:false, + }); case LOGIN_ERROR: return Object.assign({}, state, { Authorization: null, diff --git a/frontend/src/routes.tsx b/frontend/src/routes.tsx index f0bef4435..8c95aaa13 100644 --- a/frontend/src/routes.tsx +++ b/frontend/src/routes.tsx @@ -30,7 +30,7 @@ import { fetchDatasetRole, fetchFileRole } from "./actions/authorization"; import { PageNotFound } from "./components/errors/PageNotFound"; import { Forbidden } from "./components/errors/Forbidden"; import { ApiKeys } from "./components/ApiKeys/ApiKey"; - +import {Profile} from "./components/users/Profile"; // https://dev.to/iamandrewluca/private-route-in-react-router-v6-lg5 const PrivateRoute = (props): JSX.Element => { const { children } = props; @@ -91,6 +91,14 @@ export const AppRoutes = (): JSX.Element => { } /> + + + + } + /> Date: Fri, 28 Apr 2023 09:16:33 -0500 Subject: [PATCH 08/32] Implemented role delete in sharing tab (#472) * Tested by deleting a user, group. 
Verified entry is removed upon refresh --- frontend/src/actions/dataset.js | 50 +++++ .../sharing/GroupAndRoleTableEntry.tsx | 61 +++++- .../sharing/UserAndRoleTableEntry.tsx | 198 +++++++++++------- frontend/src/reducers/dataset.ts | 6 + 4 files changed, 225 insertions(+), 90 deletions(-) diff --git a/frontend/src/actions/dataset.js b/frontend/src/actions/dataset.js index 3f0137dd6..c3ec30539 100644 --- a/frontend/src/actions/dataset.js +++ b/frontend/src/actions/dataset.js @@ -59,6 +59,56 @@ export function setDatasetUserRole(datasetId, username, roleType) { }; } +export const REMOVE_DATASET_GROUP_ROLE = "REMOVE_DATASET_GROUP_ROLE"; + +export function removeDatasetGroupRole(datasetId, groupId) { + return (dispatch) => { + return V2.AuthorizationService.removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( + datasetId, + groupId + ) + .then((json) => { + dispatch({ + type: REMOVE_DATASET_GROUP_ROLE, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch( + handleErrors( + reason, + removeDatasetGroupRole(datasetId, groupId) + ) + ); + }); + }; +} + +export const REMOVE_DATASET_USER_ROLE = "REMOVE_DATASET_USER_ROLE"; + +export function removeDatasetUserRole(datasetId, username) { + return (dispatch) => { + return V2.AuthorizationService.removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( + datasetId, + username + ) + .then((json) => { + dispatch({ + type: REMOVE_DATASET_GROUP_ROLE, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch( + handleErrors( + reason, + removeDatasetUserRole(datasetId, username) + ) + ); + }); + }; +} + export const RECEIVE_FILES_IN_DATASET = "RECEIVE_FILES_IN_DATASET"; export function fetchFilesInDataset(datasetId, folderId) { diff --git a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx index 16b28b0c9..ee388f5aa 100644 --- a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx @@ -5,7 +5,12 @@ import TableCell from "@mui/material/TableCell"; import Collapse from "@mui/material/Collapse"; import { GroupAndRole } from "../../openapi/v2"; import { + Button, ButtonGroup, + Dialog, + DialogActions, + DialogContent, + DialogTitle, FormControl, IconButton, InputLabel, @@ -17,9 +22,10 @@ import CheckIcon from "@mui/icons-material/Check"; import CloseIcon from "@mui/icons-material/Close"; import ListIcon from "@mui/icons-material/List"; import EditIcon from "@mui/icons-material/Edit"; +import DeleteIcon from "@mui/icons-material/Delete"; import { RootState } from "../../types/data"; import { theme } from "../../theme"; -import { setDatasetGroupRole } from "../../actions/dataset"; +import { removeDatasetGroupRole, setDatasetGroupRole } from "../../actions/dataset"; import { useParams } from "react-router-dom"; import { GroupAndRoleSubTable } from "./GroupAndRoleSubTable"; @@ -49,8 +55,14 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { role: string | undefined ) => dispatch(setDatasetGroupRole(dataset_id, group_id, role)); + const removeGroupRole = ( + dataset_id: string | undefined, + group_id: string | undefined, + ) => dispatch(removeDatasetGroupRole(dataset_id, group_id)); + const [selectedRole, setSelectedRole] = useState(group_role.role); const [editRoleOn, setEditRoleOn] = useState(false); + const [deleteRoleConfirmation, setDeleteRoleConfirmation] = useState(false); const handleRoleSelection = (e) => { 
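		// Stage the selection locally; it is only persisted when the user confirms.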
setSelectedRole(e.target.value); @@ -67,8 +79,23 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { setEditRoleOn(false); }; + const handleRoleDelete = () => { + removeGroupRole(datasetId, group_role.group.id); + setDeleteRoleConfirmation(false); + }; + return ( + setDeleteRoleConfirmation(false)}> + Are you sure? + + Do you really want to delete this role? + + + + + + ) : ( - { - setEditRoleOn(true); - }} - > - - + <> + { + setEditRoleOn(true); + }} + > + + + { + setDeleteRoleConfirmation(true); + }} + > + + + )}
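
Both sharing tables in this patch repeat the same confirm-then-dispatch flow: a Delete icon opens a MUI confirmation Dialog, and confirming awaits the remove-role action before closing. A minimal standalone sketch of that shared pattern follows; the component and prop names (ConfirmDeleteRole, onConfirmDelete) are illustrative only and do not appear in the patch.

    import React, { useState } from "react";
    import {
    	Button,
    	Dialog,
    	DialogActions,
    	DialogContent,
    	DialogTitle,
    	IconButton,
    } from "@mui/material";
    import DeleteIcon from "@mui/icons-material/Delete";

    type ConfirmDeleteRoleProps = {
    	// Parent supplies the async remove action, e.g. a wrapper around
    	// dispatch(removeDatasetUserRole(datasetId, email)).
    	onConfirmDelete: () => Promise<void>;
    };

    export function ConfirmDeleteRole({ onConfirmDelete }: ConfirmDeleteRoleProps) {
    	const [open, setOpen] = useState(false);

    	const handleDelete = async () => {
    		await onConfirmDelete(); // wait for the backend call before closing
    		setOpen(false);
    	};

    	return (
    		<>
    			<Dialog open={open} onClose={() => setOpen(false)}>
    				<DialogTitle>Are you sure?</DialogTitle>
    				<DialogContent>Do you really want to delete this role?</DialogContent>
    				<DialogActions>
    					<Button onClick={handleDelete}>Delete</Button>
    					<Button onClick={() => setOpen(false)}>Cancel</Button>
    				</DialogActions>
    			</Dialog>
    			<IconButton onClick={() => setOpen(true)}>
    				<DeleteIcon />
    			</IconButton>
    		</>
    	);
    }

Awaiting the remove action before closing the dialog is also what makes the follow-up roles refresh (added in patch 14 below) reliable.
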
diff --git a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx index 2bd06c85c..8c464de42 100644 --- a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx @@ -8,6 +8,10 @@ import PersonIcon from "@mui/icons-material/Person"; import { Button, ButtonGroup, + Dialog, + DialogActions, + DialogContent, + DialogTitle, FormControl, IconButton, InputLabel, @@ -18,9 +22,10 @@ import { AuthWrapper } from "../auth/AuthWrapper"; import CheckIcon from "@mui/icons-material/Check"; import CloseIcon from "@mui/icons-material/Close"; import EditIcon from "@mui/icons-material/Edit"; +import DeleteIcon from "@mui/icons-material/Delete"; import { theme } from "../../theme"; import { UserAndRole } from "../../openapi/v2"; -import { setDatasetUserRole } from "../../actions/dataset"; +import { removeDatasetUserRole, setDatasetUserRole } from "../../actions/dataset"; import { useParams } from "react-router-dom"; type UserAndRoleTableEntryProps = { @@ -47,8 +52,14 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { role: string | undefined ) => dispatch(setDatasetUserRole(dataset_id, username, role)); + const removeUserRole = ( + dataset_id: string | undefined, + username: string | undefined, + ) => dispatch(removeDatasetUserRole(dataset_id, username)); + const [selectedRole, setSelectedRole] = useState(user_role.role); const [editRoleOn, setEditRoleOn] = useState(false); + const [deleteRoleConfirmation, setDeleteRoleConfirmation] = useState(false); const handleRoleSelection = (e) => { setSelectedRole(e.target.value); @@ -65,88 +76,117 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { setEditRoleOn(false); }; + const handleRoleDelete = () => { + removeUserRole(datasetId, user_role.user.email); + setDeleteRoleConfirmation(false); + }; + return ( - - - {user_role.user && user_role.user.email ? ( - - ) : ( - - )} - - - {user_role.user.email} - - {editRoleOn ? ( - - Role - - - ) : ( - selectedRole - )} - {/*only owner or editor are allowed to modify roles of the member*/} - + + setDeleteRoleConfirmation(false)}> + Are you sure? + + Do you really want to delete this role? + + + + + + + + + {user_role.user && user_role.user.email ? ( + + ) : ( + + )} + + + {user_role.user.email} + {editRoleOn ? ( - - + Role + + ) : ( - { - setEditRoleOn(true); - }} - > - - + selectedRole )} - - - - + {/*only owner or editor are allowed to modify roles of the member*/} + + {editRoleOn ? 
( + + + + + + + + + ) : ( + <> + { + setEditRoleOn(true); + }} + > + + + { + setDeleteRoleConfirmation(true); + }} + > + + + + )} + + + + + ); } diff --git a/frontend/src/reducers/dataset.ts b/frontend/src/reducers/dataset.ts index 6a1bf8390..8b59aceb4 100644 --- a/frontend/src/reducers/dataset.ts +++ b/frontend/src/reducers/dataset.ts @@ -8,6 +8,8 @@ import { RESET_CREATE_DATASET, SET_DATASET_GROUP_ROLE, SET_DATASET_USER_ROLE, + REMOVE_DATASET_GROUP_ROLE, + REMOVE_DATASET_USER_ROLE, UPDATE_DATASET, } from "../actions/dataset"; import { @@ -58,6 +60,10 @@ const dataset = (state = defaultState, action: DataAction) => { return Object.assign({}, state, {}); case SET_DATASET_USER_ROLE: return Object.assign({}, state, {}); + case REMOVE_DATASET_GROUP_ROLE: + return Object.assign({}, state, {}); + case REMOVE_DATASET_USER_ROLE: + return Object.assign({}, state, {}); case UPDATE_FILE: return Object.assign({}, state, { files: state.files.map((file) => From 06952319d615cf729b25fd8a8454e0e77155ca6d Mon Sep 17 00:00:00 2001 From: Todd Nicholson <40038535+tcnichol@users.noreply.github.com> Date: Fri, 28 Apr 2023 11:04:01 -0500 Subject: [PATCH 09/32] Show Creator on Group Page (#428) * the owner is visible, and we cannot change the owner in the table * showing group creator on top * creator link is wrong, need to fix * bold text for word 'creator' * using more conditionals to reduce duplicate code * missing modal added creator name matches other names --------- Co-authored-by: Chen Wang --- frontend/src/components/groups/Group.tsx | 11 ++++++++++- .../src/components/groups/MembersTable.tsx | 5 ++++- .../groups/MembersTableUserEntry.tsx | 19 ++++++++++++++++--- 3 files changed, 30 insertions(+), 5 deletions(-) diff --git a/frontend/src/components/groups/Group.tsx b/frontend/src/components/groups/Group.tsx index bf3ecc8a2..d70c4a31b 100644 --- a/frontend/src/components/groups/Group.tsx +++ b/frontend/src/components/groups/Group.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useState } from "react"; -import { Box, Button, Grid } from "@mui/material"; +import { Box, Button, Grid, Link } from "@mui/material"; import Layout from "../Layout"; import { RootState } from "../../types/data"; import { useDispatch, useSelector } from "react-redux"; @@ -33,8 +33,13 @@ export function Group() { dispatch(deleteGroup(groupId)); const groupAbout = useSelector((state: RootState) => state.group.about); + const role = useSelector((state: RootState) => state.group.role); + const groupCreatorEmail = useSelector( + (state: RootState) => state.group.about.creator + ); + const groupCreatorEmailLink = "mailto:" + groupCreatorEmail; const [addMemberModalOpen, setAddMemberModalOpen] = useState(false); const [deleteGroupConfirmOpen, setDeleteGroupConfirmOpen] = useState(false); @@ -124,6 +129,10 @@ export function Group() { {groupAbout.description} + + Creator: + {groupCreatorEmail} + diff --git a/frontend/src/components/groups/MembersTable.tsx b/frontend/src/components/groups/MembersTable.tsx index 96f88c566..e6ee6b696 100644 --- a/frontend/src/components/groups/MembersTable.tsx +++ b/frontend/src/components/groups/MembersTable.tsx @@ -23,7 +23,7 @@ export default function MembersTable(props: MembersTableProps) { // mapStateToProps const groupAbout = useSelector((state: RootState) => state.group.about) - + const groupCreatorEmail = useSelector((state: RootState) => state.group.about.creator) // dispatch const dispatch = useDispatch(); const groupMemberDeleted = (groupId: string|undefined, username: string|undefined) => 
dispatch(deleteGroupMember(groupId, username)) @@ -31,6 +31,8 @@ export default function MembersTable(props: MembersTableProps) { const [deleteMemberConfirmOpen, setDeleteMemberConfirmOpen] = useState(false); const [selectMemberUsername, setSelectMemberUsername] = useState(); + + return ( <> {member.user.first_name} {member.user.last_name} {member.user.email} - + {member.user.email == creatorEmail && + {"Owner"} + + } + {member.user.email != creatorEmail && + { editRoleOn ? @@ -118,7 +124,13 @@ export function MembersTableUserEntry(props: MembersTableUserEntryProps) { } - + } + {member.user.email == creatorEmail && + + + } + {member.user.email != creatorEmail && + {/*only owner or editor are allowed to delete*/} { @@ -129,6 +141,7 @@ export function MembersTableUserEntry(props: MembersTableUserEntryProps) { + } ) } From 7ad8356a159aea2c96e8336f56765d643046fb73 Mon Sep 17 00:00:00 2001 From: Aruna Parameswaran Date: Mon, 1 May 2023 10:41:35 -0500 Subject: [PATCH 10/32] Fixed bug where error detail might be empty (#478) * Fixed bug where error detail might be empty * Added null check to the detail string to prevent errors when detail is not set * Second check to validate the body field --- frontend/src/actions/common.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/actions/common.js b/frontend/src/actions/common.js index 6a5435bf1..322634013 100644 --- a/frontend/src/actions/common.js +++ b/frontend/src/actions/common.js @@ -64,7 +64,7 @@ export function handleErrors(reason, originalFunc){ return (dispatch) => { dispatch({ type: NOT_AUTHORIZED, - reason: reason.body.detail, + reason: reason.body !== undefined && reason.body.detail !== undefined? reason.body.detail : "Forbidden", stack: reason.stack ? reason.stack : "", receivedAt: Date.now() }); @@ -73,7 +73,7 @@ export function handleErrors(reason, originalFunc){ return (dispatch) => { dispatch({ type: NOT_FOUND, - reason: reason.body.detail, + reason: reason.body !== undefined && reason.body.detail !== undefined? reason.body.detail : "Not Found", stack: reason.stack ? 
reason.stack : "", receivedAt: Date.now() }); From 5fc009ae2a427fdd5fa867d108559aacd458900e Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 1 May 2023 15:02:05 -0500 Subject: [PATCH 11/32] filter the option without group owner --- frontend/src/components/groups/AddMemberModal.tsx | 14 +++++++++----- frontend/src/components/groups/Group.tsx | 1 + 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/frontend/src/components/groups/AddMemberModal.tsx b/frontend/src/components/groups/AddMemberModal.tsx index b8421c8a5..d4223f0e7 100644 --- a/frontend/src/components/groups/AddMemberModal.tsx +++ b/frontend/src/components/groups/AddMemberModal.tsx @@ -2,7 +2,6 @@ import React, { useEffect, useState } from "react"; import { Autocomplete, Button, - Container, Dialog, DialogActions, DialogContent, @@ -19,12 +18,13 @@ import GroupsIcon from "@mui/icons-material/Groups"; type AddMemberModalProps = { open: boolean; handleClose: any; + groupOwner: string; groupName: string; groupId: string | undefined; }; export default function AddMemberModal(props: AddMemberModalProps) { - const { open, handleClose, groupName, groupId } = props; + const { open, handleClose, groupOwner, groupName, groupId } = props; const dispatch = useDispatch(); const listAllUsers = (skip: number, limit: number) => @@ -44,9 +44,13 @@ export default function AddMemberModal(props: AddMemberModalProps) { useEffect(() => { setOptions( - users.reduce((list: string[], user: UserOut) => { - return [...list, user.email]; - }, []) + users + .reduce((list: string[], user: UserOut) => { + return [...list, user.email]; + }, []) + .filter((email) => { + email !== groupOwner; + }) ); }, [users]); diff --git a/frontend/src/components/groups/Group.tsx b/frontend/src/components/groups/Group.tsx index d70c4a31b..c53dd543d 100644 --- a/frontend/src/components/groups/Group.tsx +++ b/frontend/src/components/groups/Group.tsx @@ -94,6 +94,7 @@ export function Group() { handleClose={() => { setAddMemberModalOpen(false); }} + groupOwner={groupAbout.creator} groupName={groupAbout.name} groupId={groupAbout.id} /> From e72c3e12e153976c6fc8c2c03c1fcebfd33fde99 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 1 May 2023 15:35:09 -0500 Subject: [PATCH 12/32] filter group owner out --- frontend/src/components/groups/AddMemberModal.tsx | 6 ++---- frontend/src/components/groups/Group.tsx | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/frontend/src/components/groups/AddMemberModal.tsx b/frontend/src/components/groups/AddMemberModal.tsx index d4223f0e7..7b8744929 100644 --- a/frontend/src/components/groups/AddMemberModal.tsx +++ b/frontend/src/components/groups/AddMemberModal.tsx @@ -48,11 +48,9 @@ export default function AddMemberModal(props: AddMemberModalProps) { .reduce((list: string[], user: UserOut) => { return [...list, user.email]; }, []) - .filter((email) => { - email !== groupOwner; - }) + .filter((email) => email !== groupOwner) ); - }, [users]); + }, [users, groupOwner]); const handleAddButtonClick = () => { groupMemberAdded(groupId, email); diff --git a/frontend/src/components/groups/Group.tsx b/frontend/src/components/groups/Group.tsx index c53dd543d..72fc0cf68 100644 --- a/frontend/src/components/groups/Group.tsx +++ b/frontend/src/components/groups/Group.tsx @@ -94,7 +94,7 @@ export function Group() { handleClose={() => { setAddMemberModalOpen(false); }} - groupOwner={groupAbout.creator} + groupOwner={groupCreatorEmail} groupName={groupAbout.name} groupId={groupAbout.id} /> From 
b04bd1fe0217f2a6e8be7681dc6d810e2fa36389 Mon Sep 17 00:00:00 2001 From: Aruna Parameswaran Date: Wed, 3 May 2023 19:57:38 -0500 Subject: [PATCH 13/32] Made the updaterole function async, and added a call to fetch roles to refresh the list of roles in state --- .../src/components/datasets/ShareDatasetModal.tsx | 12 ++++++++---- .../components/datasets/ShareGroupDatasetModal.tsx | 13 +++++++++---- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/frontend/src/components/datasets/ShareDatasetModal.tsx b/frontend/src/components/datasets/ShareDatasetModal.tsx index 28ed68a1d..36c898130 100644 --- a/frontend/src/components/datasets/ShareDatasetModal.tsx +++ b/frontend/src/components/datasets/ShareDatasetModal.tsx @@ -19,7 +19,7 @@ import { Typography, } from "@mui/material"; import { useParams } from "react-router-dom"; -import { setDatasetUserRole } from "../../actions/dataset"; +import { fetchDatasetRoles, setDatasetUserRole } from "../../actions/dataset"; import { useDispatch, useSelector } from "react-redux"; import CloseIcon from "@mui/icons-material/Close"; import { fetchAllUsers } from "../../actions/user"; @@ -46,9 +46,12 @@ export default function ShareDatasetModal(props: ShareDatasetModalProps) { const [options, setOptions] = useState([]); const users = useSelector((state: RootState) => state.group.users); - const setUserRole = (datasetId: string, username: string, role: string) => + const setUserRole = async (datasetId: string, username: string, role: string) => dispatch(setDatasetUserRole(datasetId, username, role)); + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); + useEffect(() => { listAllUsers(0, 21); }, []); @@ -61,11 +64,12 @@ export default function ShareDatasetModal(props: ShareDatasetModalProps) { ); }, [users]); - const onShare = () => { - setUserRole(datasetId, email, role); + const onShare = async () => { + await setUserRole(datasetId, email, role); setEmail(""); setRole("viewer"); setShowSuccessAlert(true); + getRoles(datasetId); }; return ( diff --git a/frontend/src/components/datasets/ShareGroupDatasetModal.tsx b/frontend/src/components/datasets/ShareGroupDatasetModal.tsx index 072d0dd54..b5dcd120d 100644 --- a/frontend/src/components/datasets/ShareGroupDatasetModal.tsx +++ b/frontend/src/components/datasets/ShareGroupDatasetModal.tsx @@ -22,7 +22,7 @@ import { } from "@mui/material"; import { fetchGroups } from "../../actions/group"; import { RootState } from "../../types/data"; -import { setDatasetGroupRole } from "../../actions/dataset"; +import { fetchDatasetRoles, setDatasetGroupRole } from "../../actions/dataset"; import { useParams } from "react-router-dom"; import CloseIcon from "@mui/icons-material/Close"; @@ -44,9 +44,13 @@ export default function ShareGroupDatasetModal( const dispatch = useDispatch(); const listGroups = () => dispatch(fetchGroups(0, 21)); const groups = useSelector((state: RootState) => state.group.groups); - const setGroupRole = (datasetId: string, groupId: string, role: string) => + const setGroupRole = async (datasetId: string, groupId: string, role: string) => dispatch(setDatasetGroupRole(datasetId, groupId, role)); + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); + + // component did mount useEffect(() => { listGroups(); @@ -60,11 +64,12 @@ export default function ShareGroupDatasetModal( ); }, [groups]); - const onShare = () => { - setGroupRole(datasetId, group.id, role); + const onShare = async () => { + await 
setGroupRole(datasetId, group.id, role); setGroup({ label: "", id: "" }); setRole("viewer"); setShowSuccessAlert(true); + getRoles(datasetId); }; return ( From 19d1e2b5c646ab7d27d0460a67ac3c181e8d8df3 Mon Sep 17 00:00:00 2001 From: Aruna Parameswaran Date: Wed, 3 May 2023 20:35:27 -0500 Subject: [PATCH 14/32] Updated remove role logic to automatically refresh the roles list --- .../components/sharing/GroupAndRoleTableEntry.tsx | 13 +++++++++---- .../components/sharing/UserAndRoleTableEntry.tsx | 13 +++++++++---- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx index ee388f5aa..6faa8ecae 100644 --- a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx @@ -25,7 +25,7 @@ import EditIcon from "@mui/icons-material/Edit"; import DeleteIcon from "@mui/icons-material/Delete"; import { RootState } from "../../types/data"; import { theme } from "../../theme"; -import { removeDatasetGroupRole, setDatasetGroupRole } from "../../actions/dataset"; +import { fetchDatasetRoles, removeDatasetGroupRole, setDatasetGroupRole } from "../../actions/dataset"; import { useParams } from "react-router-dom"; import { GroupAndRoleSubTable } from "./GroupAndRoleSubTable"; @@ -55,7 +55,7 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { role: string | undefined ) => dispatch(setDatasetGroupRole(dataset_id, group_id, role)); - const removeGroupRole = ( + const removeGroupRole = async ( dataset_id: string | undefined, group_id: string | undefined, ) => dispatch(removeDatasetGroupRole(dataset_id, group_id)); @@ -78,10 +78,15 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { groupRoleAssigned(datasetId, group_role.group.id, selectedRole); setEditRoleOn(false); }; + + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); - const handleRoleDelete = () => { - removeGroupRole(datasetId, group_role.group.id); + + const handleRoleDelete = async () => { + await removeGroupRole(datasetId, group_role.group.id); setDeleteRoleConfirmation(false); + getRoles(datasetId) }; return ( diff --git a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx index 8c464de42..0aecb21f1 100644 --- a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx @@ -25,7 +25,7 @@ import EditIcon from "@mui/icons-material/Edit"; import DeleteIcon from "@mui/icons-material/Delete"; import { theme } from "../../theme"; import { UserAndRole } from "../../openapi/v2"; -import { removeDatasetUserRole, setDatasetUserRole } from "../../actions/dataset"; +import { fetchDatasetRoles, removeDatasetUserRole, setDatasetUserRole } from "../../actions/dataset"; import { useParams } from "react-router-dom"; type UserAndRoleTableEntryProps = { @@ -52,11 +52,15 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { role: string | undefined ) => dispatch(setDatasetUserRole(dataset_id, username, role)); - const removeUserRole = ( + const removeUserRole = async ( dataset_id: string | undefined, username: string | undefined, ) => dispatch(removeDatasetUserRole(dataset_id, username)); + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); + + const [selectedRole, setSelectedRole] = 
useState(user_role.role); const [editRoleOn, setEditRoleOn] = useState(false); const [deleteRoleConfirmation, setDeleteRoleConfirmation] = useState(false); @@ -76,9 +80,10 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { setEditRoleOn(false); }; - const handleRoleDelete = () => { - removeUserRole(datasetId, user_role.user.email); + const handleRoleDelete = async () => { + await removeUserRole(datasetId, user_role.user.email); setDeleteRoleConfirmation(false); + getRoles(datasetId) }; return ( From a297eccb3b118af5c7b532c8906663df4f07ae4a Mon Sep 17 00:00:00 2001 From: Aruna Parameswaran Date: Wed, 3 May 2023 20:38:16 -0500 Subject: [PATCH 15/32] Executed eslint --- frontend/src/components/sharing/GroupAndRoleTableEntry.tsx | 6 +++--- frontend/src/components/sharing/UserAndRoleTableEntry.tsx | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx index 6faa8ecae..74509a121 100644 --- a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx @@ -79,14 +79,14 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { setEditRoleOn(false); }; - const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); const handleRoleDelete = async () => { await removeGroupRole(datasetId, group_role.group.id); setDeleteRoleConfirmation(false); - getRoles(datasetId) + getRoles(datasetId); }; return ( diff --git a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx index 0aecb21f1..c9e46ea11 100644 --- a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx @@ -57,8 +57,8 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { username: string | undefined, ) => dispatch(removeDatasetUserRole(dataset_id, username)); - const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + const getRoles = (datasetId: string | undefined) => + dispatch(fetchDatasetRoles(datasetId)); const [selectedRole, setSelectedRole] = useState(user_role.role); @@ -83,7 +83,7 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { const handleRoleDelete = async () => { await removeUserRole(datasetId, user_role.user.email); setDeleteRoleConfirmation(false); - getRoles(datasetId) + getRoles(datasetId); }; return ( From 174f881a2d2ebd520a4de02f042022a23b8c8474 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Thu, 4 May 2023 15:04:37 -0500 Subject: [PATCH 16/32] stub for feeds --- backend/app/beanie_views_test.py | 23 +- backend/app/deps/authorization_deps.py | 134 ++++--- backend/app/models/datasets.py | 4 +- backend/app/models/feeds.py | 24 +- .../models/migrations/rename_dataset_field.py | 4 +- backend/app/routers/authorization.py | 132 ++++--- backend/app/routers/datasets.py | 361 +++++++++--------- backend/app/routers/feeds.py | 25 +- backend/app/routers/metadata_datasets.py | 132 +++---- 9 files changed, 451 insertions(+), 388 deletions(-) diff --git a/backend/app/beanie_views_test.py b/backend/app/beanie_views_test.py index d9c229659..e394163e3 100644 --- a/backend/app/beanie_views_test.py +++ b/backend/app/beanie_views_test.py @@ -18,10 +18,12 @@ 
class Category(BaseModel): class Product(Document): - name: str # You can use normal types just like in pydantic + name: str # You can use normal types just like in pydantic description: Optional[str] = None - price: Indexed(float) # You can also specify that a field should correspond to an index - category: Category # You can include pydantic models as well + price: Indexed( + float + ) # You can also specify that a field should correspond to an index + category: Category # You can include pydantic models as well class Bike(Document): @@ -42,7 +44,7 @@ class Settings: "$group": { "_id": "$type", "number": {"$sum": 1}, - "new": {"$sum": {"$cond": ["$is_new", 1, 0]}} + "new": {"$sum": {"$cond": ["$is_new", 1, 0]}}, } }, ] @@ -55,7 +57,11 @@ async def example(): # Initialize beanie with the Product document class # await init_beanie(database=client.beanie, document_models=[Product, Bike, Metrics, DatasetDBViewList], recreate_views=True,) - await init_beanie(database=client.clowder2, document_models=[DatasetDB, DatasetDBViewList, AuthorizationDB], recreate_views=True,) + await init_beanie( + database=client.clowder2, + document_models=[DatasetDB, DatasetDBViewList, AuthorizationDB], + recreate_views=True, + ) # chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.") # # Beanie documents work just like pydantic models @@ -78,8 +84,11 @@ async def example(): # results = await Metrics.find(Metrics.type == "Road").to_list() # print(results) - results = await DatasetDBViewList.find(DatasetDBViewList.author.email == "lmarini@illinois.edu").to_list() + results = await DatasetDBViewList.find( + DatasetDBViewList.author.email == "lmarini@illinois.edu" + ).to_list() print(results) + if __name__ == "__main__": - asyncio.run(example()) \ No newline at end of file + asyncio.run(example()) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 711d70872..781eff613 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -14,15 +14,19 @@ async def get_role( - dataset_id: str, - db: MongoClient = Depends(get_db), - current_user=Depends(get_current_username), + dataset_id: str, + db: MongoClient = Depends(get_db), + current_user=Depends(get_current_username), ) -> RoleType: """Returns the role a specific user has on a dataset. 
If the user is a creator (owner), they are not listed in the user_ids list.""" - authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == PyObjectId(dataset_id), - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) return authorization.role # authorization = await db["authorization"].find_one( # { @@ -37,15 +41,19 @@ async def get_role( async def get_role_by_file( - file_id: str, - db: MongoClient = Depends(get_db), - current_user=Depends(get_current_username), + file_id: str, + db: MongoClient = Depends(get_db), + current_user=Depends(get_current_username), ) -> RoleType: if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None: file_out = FileOut.from_mongo(file) - authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == file_out.dataset_id, - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == file_out.dataset_id, + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) return authorization.role # authorization = await db["authorization"].find_one( # { @@ -62,19 +70,19 @@ async def get_role_by_file( async def get_role_by_metadata( - metadata_id: str, - db: MongoClient = Depends(get_db), - current_user=Depends(get_current_username), + metadata_id: str, + db: MongoClient = Depends(get_db), + current_user=Depends(get_current_username), ) -> RoleType: if ( - metadata := await db["metadata"].find_one({"_id": ObjectId(metadata_id)}) + metadata := await db["metadata"].find_one({"_id": ObjectId(metadata_id)}) ) is not None: md_out = MetadataOut.from_mongo(metadata) resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await db["files"].find_one({"_id": ObjectId(resource_id)}) + file := await db["files"].find_one({"_id": ObjectId(resource_id)}) ) is not None: file_out = FileOut.from_mongo(file) authorization = await db["authorization"].find_one( @@ -94,7 +102,7 @@ async def get_role_by_metadata( return role elif resource_type == "datasets": if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(resource_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(resource_id)}) ) is not None: dataset_out = DatasetOut.from_mongo(dataset) authorization = await db["authorization"].find_one( @@ -115,9 +123,9 @@ async def get_role_by_metadata( async def get_role_by_group( - group_id: str, - db: MongoClient = Depends(get_db), - current_user=Depends(get_current_username), + group_id: str, + db: MongoClient = Depends(get_db), + current_user=Depends(get_current_username), ) -> RoleType: if (group := await db["groups"].find_one({"_id": ObjectId(group_id)})) is not None: group_out = GroupOut.from_mongo(group) @@ -145,15 +153,19 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - dataset_id: str, - db: MongoClient = Depends(get_db), - current_user: str = Depends(get_current_username), + self, + dataset_id: str, + db: MongoClient = Depends(get_db), + current_user: str = Depends(get_current_username), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. 
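+        # The Or() matches either the dataset creator or any user listed in
+        # user_ids, so a single find_one covers both ownership and granted roles.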
- authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == PyObjectId(dataset_id), - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) # if ( # authorization_q := await db["authorization"].find_one( # { @@ -193,16 +205,20 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - file_id: str, - db: MongoClient = Depends(get_db), - current_user: str = Depends(get_current_username), + self, + file_id: str, + db: MongoClient = Depends(get_db), + current_user: str = Depends(get_current_username), ): if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None: file_out = FileOut.from_mongo(file) - authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == file_out.dataset_id, - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == file_out.dataset_id, + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) # if ( # authorization_q := await db["authorization"].find_one( # { @@ -238,25 +254,29 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - metadata_id: str, - db: MongoClient = Depends(get_db), - current_user: str = Depends(get_current_username), + self, + metadata_id: str, + db: MongoClient = Depends(get_db), + current_user: str = Depends(get_current_username), ): if ( - metadata := await db["metadata"].find_one({"_id": ObjectId(metadata_id)}) + metadata := await db["metadata"].find_one({"_id": ObjectId(metadata_id)}) ) is not None: md_out = MetadataOut.from_mongo(metadata) resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await db["files"].find_one({"_id": ObjectId(resource_id)}) + file := await db["files"].find_one({"_id": ObjectId(resource_id)}) ) is not None: file_out = FileOut.from_mongo(file) - authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == file_out.dataset_id, - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == file_out.dataset_id, + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) # if ( # authorization_q := await db["authorization"].find_one( # { @@ -286,14 +306,18 @@ async def __call__( ) elif resource_type == "datasets": if ( - dataset := await db["datasets"].find_one( - {"_id": ObjectId(resource_id)} - ) + dataset := await db["datasets"].find_one( + {"_id": ObjectId(resource_id)} + ) ) is not None: dataset_out = DatasetOut.from_mongo(dataset) - authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == dataset_out.dataset_id, - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == dataset_out.dataset_id, + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) # if ( # authorization_q := await db["authorization"].find_one( # { @@ -330,13 +354,13 @@ def __init__(self, role: str): self.role = role async def __call__( - 
self, - group_id: str, - db: MongoClient = Depends(get_db), - current_user: str = Depends(get_current_username), + self, + group_id: str, + db: MongoClient = Depends(get_db), + current_user: str = Depends(get_current_username), ): if ( - group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) + group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: group = GroupOut.from_mongo(group_q) if group.creator == current_user: diff --git a/backend/app/models/datasets.py b/backend/app/models/datasets.py index a3bfef98d..b17218927 100644 --- a/backend/app/models/datasets.py +++ b/backend/app/models/datasets.py @@ -57,7 +57,7 @@ class Settings: class DatasetDBViewList(View, DatasetBase): # FIXME This seems to be required to return _id. Otherwise _id is null in the response. - id: PydanticObjectId = Field(None, alias='_id') + id: PydanticObjectId = Field(None, alias="_id") author: UserOut created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) @@ -72,7 +72,7 @@ class Settings: "from": "authorization", "localField": "_id", "foreignField": "dataset_id", - "as": "auth" + "as": "auth", } }, ] diff --git a/backend/app/models/feeds.py b/backend/app/models/feeds.py index 75732e65f..d6e5df0a5 100644 --- a/backend/app/models/feeds.py +++ b/backend/app/models/feeds.py @@ -1,10 +1,13 @@ from datetime import datetime +from typing import Optional, List + +import pymongo +from beanie import Document from pydantic import Field, BaseModel -from typing import Optional, List, Union -from app.models.mongomodel import MongoModel -from app.models.users import UserOut + +from app.models.listeners import FeedListener from app.models.search import SearchObject -from app.models.listeners import EventListenerOut, FeedListener +from app.models.users import UserOut class JobFeed(BaseModel): @@ -24,10 +27,19 @@ class FeedIn(JobFeed): pass -class FeedDB(JobFeed, MongoModel): - author: Optional[UserOut] = None +class FeedDB(Document, JobFeed): + creator: Optional[UserOut] = None updated: datetime = Field(default_factory=datetime.utcnow) + class Settings: + name = "feeds_beanie" + indexes = [ + [ + ("name", pymongo.TEXT), + ("description", pymongo.TEXT), + ], + ] + class FeedOut(FeedDB): pass diff --git a/backend/app/models/migrations/rename_dataset_field.py b/backend/app/models/migrations/rename_dataset_field.py index 7b2d31563..0ed391fc1 100644 --- a/backend/app/models/migrations/rename_dataset_field.py +++ b/backend/app/models/migrations/rename_dataset_field.py @@ -30,7 +30,7 @@ class Settings: class Forward: @iterative_migration() async def views_to_user_views( - self, input_document: DatasetDBv1, output_document: DatasetDB + self, input_document: DatasetDBv1, output_document: DatasetDB ): output_document.user_views = input_document.views @@ -38,6 +38,6 @@ async def views_to_user_views( class Backward: @iterative_migration() async def user_views_to_views( - self, input_document: DatasetDB, output_document: DatasetDBv1 + self, input_document: DatasetDB, output_document: DatasetDBv1 ): output_document.views = input_document.user_views diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 127b7651e..c0b28a259 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -34,11 +34,11 @@ @router.post("/datasets/{dataset_id}", response_model=AuthorizationDB) async def save_authorization( - dataset_id: str, - authorization_in: AuthorizationBase, - 
user=Depends(get_current_username), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + authorization_in: AuthorizationBase, + user=Depends(get_current_username), + db: MongoClient = Depends(dependencies.get_db), + allow: bool = Depends(Authorization("editor")), ): """Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id.""" @@ -62,7 +62,9 @@ async def save_authorization( authorization_dict = authorization_in.dict() authorization_dict["user_ids"] = user_ids - authorization_db = await AuthorizationDB(**authorization_dict, creator=user).insert() + authorization_db = await AuthorizationDB( + **authorization_dict, creator=user + ).insert() return authorization_db # new_authorization = await db["authorization"].insert_one( # authorization_db.to_mongo() @@ -73,15 +75,19 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationDB) async def get_dataset_role( - dataset_id: str, - current_user=Depends(get_current_username), - db: MongoClient = Depends(get_db), + dataset_id: str, + current_user=Depends(get_current_username), + db: MongoClient = Depends(get_db), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization - authorization = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == PyObjectId(dataset_id), - Or(AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user)) + authorization = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) # if ( # authorization_q := await db["authorization"].find_one( # { @@ -104,7 +110,7 @@ async def get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer") async def get_dataset_role_viewer( - dataset_id: str, allow: bool = Depends(Authorization("viewer")) + dataset_id: str, allow: bool = Depends(Authorization("viewer")) ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -113,7 +119,7 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner") async def get_dataset_role_owner( - dataset_id: str, allow: bool = Depends(Authorization("owner")) + dataset_id: str, allow: bool = Depends(Authorization("owner")) ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -122,9 +128,9 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role", response_model=RoleType) async def get_file_role( - file_id: str, - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_file), + file_id: str, + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_file), ): """Retrieve role of user for an individual file. 
Role cannot change between file versions.""" return role @@ -132,9 +138,9 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role", response_model=AuthorizationMetadata) async def get_metadata_role( - metadata_id: str, - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_metadata), + metadata_id: str, + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_metadata), ): """Retrieve role of user for group. Group roles can be OWNER, EDITOR, or VIEWER (for regular Members).""" return role @@ -142,9 +148,9 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role", response_model=RoleType) async def get_group_role( - group_id: str, - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_group), + group_id: str, + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_group), ): """Retrieve role of user on a particular group (i.e. whether they can change group memberships).""" return role @@ -155,26 +161,28 @@ async def get_group_role( response_model=AuthorizationDB, ) async def set_dataset_group_role( - dataset_id: str, - group_id: str, - role: RoleType, - db: MongoClient = Depends(get_db), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + group_id: str, + role: RoleType, + db: MongoClient = Depends(get_db), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset = DatasetOut.from_mongo(dataset_q) if ( - group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) + group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: group = GroupOut.from_mongo(group_q) # First, remove any existing role the group has on the dataset await remove_dataset_group_role(dataset_id, group_id, db, user_id, allow) - auth_db = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role) + auth_db = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) # if ( # auth_q := await db["authorization"].find_one( # {"dataset_id": ObjectId(dataset_id), "role": role} @@ -216,24 +224,26 @@ async def set_dataset_group_role( "/datasets/{dataset_id}/user_role/{username}/{role}", response_model=AuthorizationDB ) async def set_dataset_user_role( - dataset_id: str, - username: str, - role: RoleType, - db: MongoClient = Depends(get_db), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + role: RoleType, + db: MongoClient = Depends(get_db), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign a single user a specific role for a dataset.""" if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset = DatasetOut.from_mongo(dataset_q) if (user_q := await db["users"].find_one({"email": username})) is not None: # First, remove any existing role the user has on the dataset await remove_dataset_user_role(dataset_id, username, db, user_id, allow) - auth_db = await AuthorizationDB.find_one(AuthorizationDB.dataset_id 
== PyObjectId(dataset_id), - AuthorizationDB.role == role) + auth_db = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) # if ( # auth_q := await db["authorization"].find_one( # {"dataset_id": ObjectId(dataset_id), "role": role} @@ -269,24 +279,26 @@ async def set_dataset_user_role( response_model=AuthorizationDB, ) async def remove_dataset_group_role( - dataset_id: str, - group_id: str, - db: MongoClient = Depends(get_db), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + group_id: str, + db: MongoClient = Depends(get_db), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the group has with a specific dataset.""" if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset = DatasetOut.from_mongo(dataset_q) if ( - group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) + group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: group = GroupOut.from_mongo(group_q) - auth_db = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.group_ids == group_id) + auth_db = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.group_ids == group_id, + ) # if ( # auth_q := await db["authorization"].find_one( # { @@ -318,21 +330,23 @@ async def remove_dataset_group_role( response_model=AuthorizationDB, ) async def remove_dataset_user_role( - dataset_id: str, - username: str, - db: MongoClient = Depends(get_db), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + db: MongoClient = Depends(get_db), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the user has with a specific dataset.""" if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset = DatasetOut.from_mongo(dataset_q) if (user_q := await db["users"].find_one({"email": username})) is not None: - auth_db = await AuthorizationDB.find_one(AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username) + auth_db = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, + ) # if ( # auth_q := await db["authorization"].find_one( # {"dataset_id": ObjectId(dataset_id), "user_ids": username} diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index d4344768c..a4c1f16dc 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -37,7 +37,8 @@ DatasetIn, DatasetDB, DatasetOut, - DatasetPatch, DatasetDBViewList, + DatasetPatch, + DatasetDBViewList, ) from app.models.files import FileOut, FileDB from app.models.folders import FolderOut, FolderIn, FolderDB @@ -124,13 +125,13 @@ def nested_update(target_dict, update_dict): async def _create_folder_structure( - dataset_id: str, - contents: dict, - folder_path: str, - folder_lookup: dict, - user: UserOut, - db: MongoClient, - parent_folder_id: Optional[str] = None, + dataset_id: str, + contents: dict, + folder_path: str, + folder_lookup: dict, + user: UserOut, + db: MongoClient, + parent_folder_id: 
Optional[str] = None, ): """Recursively create folders encountered in folder_path until the target folder is created. Arguments: @@ -166,9 +167,9 @@ async def _create_folder_structure( async def _get_folder_hierarchy( - folder_id: str, - hierarchy: str, - db: MongoClient, + folder_id: str, + hierarchy: str, + db: MongoClient, ): """Generate a string of nested path to folder for use in zip file creation.""" found = await db["folders"].find_one({"_id": ObjectId(folder_id)}) @@ -180,8 +181,8 @@ async def _get_folder_hierarchy( async def remove_folder_entry( - folder_id: Union[str, ObjectId], - db: MongoClient, + folder_id: Union[str, ObjectId], + db: MongoClient, ): """Remove FolderDB object into MongoDB""" await db["folders"].delete_one({"_id": ObjectId(folder_id)}) @@ -189,10 +190,10 @@ async def remove_folder_entry( @router.post("", response_model=DatasetOut) async def save_dataset( - dataset_in: DatasetIn, - user=Depends(keycloak_auth.get_current_user), - db: MongoClient = Depends(dependencies.get_db), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + dataset_in: DatasetIn, + user=Depends(keycloak_auth.get_current_user), + db: MongoClient = Depends(dependencies.get_db), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): # Check all connection and abort if any one of them is not available if db is None or es is None: @@ -235,11 +236,11 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - user_id=Depends(get_user), - db: MongoClient = Depends(dependencies.get_db), - skip: int = 0, - limit: int = 10, - mine: bool = False, + user_id=Depends(get_user), + db: MongoClient = Depends(dependencies.get_db), + skip: int = 0, + limit: int = 10, + mine: bool = False, ): if mine: return await DatasetDBViewList.find( @@ -251,7 +252,8 @@ async def get_datasets( }, sort=("created", DESCENDING), skip=skip, - limit=limit).to_list() + limit=limit, + ).to_list() # for doc in ( # await db["datasets_view"] # .find( @@ -278,7 +280,8 @@ async def get_datasets( }, sort=("created", DESCENDING), skip=skip, - limit=limit).to_list() + limit=limit, + ).to_list() # for doc in ( # await db["datasets_view"] # .find( @@ -300,9 +303,9 @@ async def get_datasets( @router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( - dataset_id: str, - # db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + # db: MongoClient = Depends(dependencies.get_db), + allow: bool = Depends(Authorization("viewer")), ): return await DatasetDB.get(dataset_id) # try: @@ -316,61 +319,61 @@ async def get_dataset( @router.get("/{dataset_id}/files") async def get_dataset_files( - dataset_id: str, - folder_id: Optional[str] = None, - user_id=Depends(get_user), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(Authorization("viewer")), - skip: int = 0, - limit: int = 10, + dataset_id: str, + folder_id: Optional[str] = None, + user_id=Depends(get_user), + db: MongoClient = Depends(dependencies.get_db), + allow: bool = Depends(Authorization("viewer")), + skip: int = 0, + limit: int = 10, ): files = [] if folder_id is not None: for f in ( - await db["files_view"] - .find( - { - "$and": [ - { - "dataset_id": ObjectId(dataset_id), - "folder_id": ObjectId(folder_id), - }, - { - "$or": [ - {"creator.email": user_id}, - {"auth": {"$elemMatch": {"user_ids": user_id}}}, - ] - }, - ] - } - ) - .skip(skip) - .limit(limit) - .to_list(length=limit) + await db["files_view"] + 
.find( + { + "$and": [ + { + "dataset_id": ObjectId(dataset_id), + "folder_id": ObjectId(folder_id), + }, + { + "$or": [ + {"creator.email": user_id}, + {"auth": {"$elemMatch": {"user_ids": user_id}}}, + ] + }, + ] + } + ) + .skip(skip) + .limit(limit) + .to_list(length=limit) ): files.append(FileOut.from_mongo(f)) else: for f in ( - await db["files_view"] - .find( - { - "$and": [ - { - "dataset_id": ObjectId(dataset_id), - "folder_id": None, - }, - { - "$or": [ - {"creator.email": user_id}, - {"auth": {"$elemMatch": {"user_ids": user_id}}}, - ] - }, - ] - } - ) - .skip(skip) - .limit(limit) - .to_list(length=limit) + await db["files_view"] + .find( + { + "$and": [ + { + "dataset_id": ObjectId(dataset_id), + "folder_id": None, + }, + { + "$or": [ + {"creator.email": user_id}, + {"auth": {"$elemMatch": {"user_ids": user_id}}}, + ] + }, + ] + } + ) + .skip(skip) + .limit(limit) + .to_list(length=limit) ): files.append(FileOut.from_mongo(f)) return files @@ -378,12 +381,12 @@ async def get_dataset_files( @router.put("/{dataset_id}", response_model=DatasetOut) async def edit_dataset( - dataset_id: str, - dataset_info: DatasetBase, - db: MongoClient = Depends(dependencies.get_db), - user_id=Depends(get_user), - es=Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetBase, + db: MongoClient = Depends(dependencies.get_db), + user_id=Depends(get_user), + es=Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -393,7 +396,7 @@ async def edit_dataset( return if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: # TODO: Refactor this with permissions checks etc. ds = dict(dataset_info) if dataset_info is not None else {} @@ -417,9 +420,9 @@ async def edit_dataset( update_record(es, "dataset", doc, dataset_id) # updating metadata in elasticsearch if ( - metadata := await db["metadata"].find_one( - {"resource.resource_id": ObjectId(dataset_id)} - ) + metadata := await db["metadata"].find_one( + {"resource.resource_id": ObjectId(dataset_id)} + ) ) is not None: doc = { "doc": { @@ -437,12 +440,12 @@ async def edit_dataset( @router.patch("/{dataset_id}", response_model=DatasetOut) async def patch_dataset( - dataset_id: str, - dataset_info: DatasetPatch, - user_id=Depends(get_user), - db: MongoClient = Depends(dependencies.get_db), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetPatch, + user_id=Depends(get_user), + db: MongoClient = Depends(dependencies.get_db), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -452,7 +455,7 @@ async def patch_dataset( return if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: # TODO: Refactor this with permissions checks etc. 
ds = dict(dataset_info) if dataset_info is not None else {} @@ -476,9 +479,9 @@ async def patch_dataset( update_record(es, "dataset", doc, dataset_id) # updating metadata in elasticsearch if ( - metadata := await db["metadata"].find_one( - {"resource.resource_id": ObjectId(dataset_id)} - ) + metadata := await db["metadata"].find_one( + {"resource.resource_id": ObjectId(dataset_id)} + ) ) is not None: doc = { "doc": { @@ -495,11 +498,11 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( - dataset_id: str, - db: MongoClient = Depends(dependencies.get_db), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + db: MongoClient = Depends(dependencies.get_db), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -528,11 +531,11 @@ async def delete_dataset( @router.post("/{dataset_id}/folders", response_model=FolderOut) async def add_folder( - dataset_id: str, - folder_in: FolderIn, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_in: FolderIn, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + allow: bool = Depends(Authorization("uploader")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -555,36 +558,36 @@ async def add_folder( @router.get("/{dataset_id}/folders") async def get_dataset_folders( - dataset_id: str, - parent_folder: Optional[str] = None, - user_id=Depends(get_user), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + parent_folder: Optional[str] = None, + user_id=Depends(get_user), + db: MongoClient = Depends(dependencies.get_db), + allow: bool = Depends(Authorization("viewer")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") folders = [] if parent_folder is None: async for f in db["folders"].find( - {"dataset_id": ObjectId(dataset_id), "parent_folder": None} + {"dataset_id": ObjectId(dataset_id), "parent_folder": None} ): folders.append(FolderDB.from_mongo(f)) else: async for f in db["folders"].find( - { - "$and": [ - { - "dataset_id": ObjectId(dataset_id), - "parent_folder": ObjectId(parent_folder), - }, - { - "$or": [ - {"author.email": user_id}, - {"auth": {"$elemMatch": {"user_ids": user_id}}}, - ] - }, - ] - } + { + "$and": [ + { + "dataset_id": ObjectId(dataset_id), + "parent_folder": ObjectId(parent_folder), + }, + { + "$or": [ + {"author.email": user_id}, + {"auth": {"$elemMatch": {"user_ids": user_id}}}, + ] + }, + ] + } ): folders.append(FolderDB.from_mongo(f)) return folders @@ -592,12 +595,12 @@ async def get_dataset_folders( @router.delete("/{dataset_id}/folders/{folder_id}") async def delete_folder( - dataset_id: str, - folder_id: str, - db: MongoClient = Depends(dependencies.get_db), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + folder_id: str, + db: MongoClient = Depends(dependencies.get_db), + fs: Minio = Depends(dependencies.get_fs), + es: 
Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -613,18 +616,18 @@ async def delete_folder( async def _delete_nested_folders(parent_folder_id): while ( - folders := await db["folders"].find_one( - { - "dataset_id": ObjectId(dataset_id), - "parent_folder": ObjectId(parent_folder_id), - } - ) + folders := await db["folders"].find_one( + { + "dataset_id": ObjectId(dataset_id), + "parent_folder": ObjectId(parent_folder_id), + } + ) ) is not None: async for folder in db["folders"].find( - { - "dataset_id": ObjectId(dataset_id), - "parent_folder": ObjectId(parent_folder_id), - } + { + "dataset_id": ObjectId(dataset_id), + "parent_folder": ObjectId(parent_folder_id), + } ): folder = FolderOut(**folder) parent_folder_id = folder.id @@ -634,7 +637,7 @@ async def _delete_nested_folders(parent_folder_id): await remove_folder_entry(folder.id, db) async for file in db["files"].find( - {"folder_id": ObjectId(folder.id)} + {"folder_id": ObjectId(folder.id)} ): file = FileOut(**file) await remove_file_entry(file.id, db, fs, es) @@ -648,21 +651,21 @@ async def _delete_nested_folders(parent_folder_id): @router.post("/{dataset_id}/files", response_model=FileOut) async def save_file( - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - credentials: HTTPAuthorizationCredentials = Security(security), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + credentials: HTTPAuthorizationCredentials = Security(security), + allow: bool = Depends(Authorization("uploader")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: if user is None: raise HTTPException( @@ -673,7 +676,7 @@ async def save_file( if folder_id is not None: if ( - folder := await db["folders"].find_one({"_id": ObjectId(folder_id)}) + folder := await db["folders"].find_one({"_id": ObjectId(folder_id)}) ) is not None: folder = FolderOut.from_mongo(folder) fileDB.folder_id = folder.id @@ -702,13 +705,13 @@ async def save_file( @router.post("/createFromZip", response_model=DatasetOut) async def create_dataset_from_zip( - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - token: str = Depends(get_token), + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = 
Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + token: str = Depends(get_token), ): if user is None: raise HTTPException( @@ -784,16 +787,16 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( - dataset_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset = DatasetOut(**dataset) current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") @@ -821,7 +824,7 @@ async def download_dataset( # Write dataset metadata if found metadata = [] async for md in db["metadata"].find( - {"resource.resource_id": ObjectId(dataset_id)} + {"resource.resource_id": ObjectId(dataset_id)} ): metadata.append(md) if len(metadata) > 0: @@ -871,7 +874,7 @@ async def download_dataset( metadata = [] async for md in db["metadata"].find( - {"resource.resource_id": ObjectId(file.id)} + {"resource.resource_id": ObjectId(file.id)} ): metadata.append(md) if len(metadata) > 0: @@ -949,21 +952,21 @@ async def download_dataset( # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( - dataset_id: str, - extractorName: str, - request: Request, - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - db: MongoClient = Depends(dependencies.get_db), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + extractorName: str, + request: Request, + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + db: MongoClient = Depends(dependencies.get_db), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if not allow: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset_out = DatasetOut.from_mongo(dataset) access_token = credentials.credentials diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py index 9a525d25d..ec738d31e 100644 --- a/backend/app/routers/feeds.py +++ b/backend/app/routers/feeds.py @@ -1,25 +1,26 @@ -import pymongo from typing import List, Optional + +import pymongo from bson import ObjectId -from fastapi import APIRouter, HTTPException, Depends, Request -from pymongo import MongoClient +from fastapi import APIRouter, HTTPException, Depends from pika.adapters.blocking_connection import BlockingChannel +from pymongo import MongoClient from app.dependencies import get_db -from app.keycloak_auth import 
get_user, get_current_user -from app.models.users import UserOut -from app.models.files import FileOut -from app.models.listeners import ( - FeedListener, - EventListenerOut, -) +from app.keycloak_auth import get_current_user from app.models.feeds import ( FeedIn, FeedDB, FeedOut, ) -from app.search.connect import check_search_result +from app.models.files import FileOut +from app.models.listeners import ( + FeedListener, + EventListenerOut, +) +from app.models.users import UserOut from app.rabbitmq.listeners import submit_file_job +from app.search.connect import check_search_result router = APIRouter() @@ -103,7 +104,7 @@ async def save_feed( db: MongoClient = Depends(get_db), ): """Create a new Feed (i.e. saved search) in the database.""" - feed = FeedDB(**feed_in.dict(), author=user) + feed = FeedDB(**feed_in.dict(), creator=user) new_feed = await db["feeds"].insert_one(feed.to_mongo()) found = await db["feeds"].find_one({"_id": new_feed.inserted_id}) feed_out = FeedOut.from_mongo(found) diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index fe39cc2da..4a7851477 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -32,11 +32,11 @@ async def _build_metadata_db_obj( - db: MongoClient, - metadata_in: MetadataIn, - dataset: DatasetOut, - user: UserOut, - agent: MetadataAgent = None, + db: MongoClient, + metadata_in: MetadataIn, + dataset: DatasetOut, + user: UserOut, + agent: MetadataAgent = None, ): content = await validate_context( db, @@ -51,9 +51,9 @@ async def _build_metadata_db_obj( if metadata_in.extractor is not None: extractor_in = LegacyEventListenerIn(**metadata_in.extractor.dict()) if ( - extractor := await db["listeners"].find_one( - {"_id": extractor_in.id, "version": extractor_in.version} - ) + extractor := await db["listeners"].find_one( + {"_id": extractor_in.id, "version": extractor_in.version} + ) ) is not None: agent = MetadataAgent(creator=user, extractor=extractor) else: @@ -75,12 +75,12 @@ async def _build_metadata_db_obj( @router.post("/{dataset_id}/metadata", response_model=MetadataOut) async def add_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("uploader")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. 
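The hunks in this range are mechanical re-wraps of the same lookup-or-404 guard that every router in this series uses: bind the query result with an assignment expression, hydrate it if present, otherwise raise. A minimal, self-contained sketch of the idiom (the helper name, `collection` argument, and `model_cls` are illustrative, not part of this patch):

    from bson import ObjectId
    from fastapi import HTTPException

    async def get_or_404(db, collection: str, resource_id: str, model_cls):
        # Same assignment-expression guard the routers above inline at each call site.
        if (doc := await db[collection].find_one({"_id": ObjectId(resource_id)})) is not None:
            return model_cls.from_mongo(doc)
        raise HTTPException(status_code=404, detail=f"{collection} {resource_id} not found")

Keeping the guard inline at every call site, as the patch does, trades repetition for having the 404 detail message visible right where each route handles it.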
@@ -89,7 +89,7 @@ async def add_dataset_metadata( Metadata document that was added to database """ if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: dataset = DatasetOut(**dataset) # If dataset already has metadata using this definition, don't allow duplication @@ -132,12 +132,12 @@ async def add_dataset_metadata( @router.put("/{dataset_id}/metadata", response_model=MetadataOut) async def replace_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. @@ -146,19 +146,19 @@ async def replace_dataset_metadata( Metadata document that was updated """ if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: query = {"resource.resource_id": ObjectId(dataset_id)} # Filter by MetadataAgent if metadata_in.extractor is not None: if ( - extractor := await db["listeners"].find_one( - { - "name": metadata_in.extractor.name, - "version": metadata_in.extractor.version, - } - ) + extractor := await db["listeners"].find_one( + { + "name": metadata_in.extractor.name, + "version": metadata_in.extractor.version, + } + ) ) is not None: agent = MetadataAgent(creator=user, extractor=extractor) # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user @@ -188,12 +188,12 @@ async def replace_dataset_metadata( @router.patch("/{dataset_id}/metadata", response_model=MetadataOut) async def update_dataset_metadata( - metadata_in: MetadataPatch, - dataset_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataPatch, + dataset_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
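Both handlers above accept an optional `extractor` block and resolve it to a MetadataAgent before touching the `metadata` collection; omitting it scopes the change to user-created metadata. A sketch of how a client might drive the PATCH route (the base URL, token, and extractor name/version here are placeholders, not values from this patch):

    import httpx

    async def patch_dataset_metadata(dataset_id: str, token: str) -> dict:
        body = {
            # Only the fields being added or updated go in content.
            "content": {"alternateName": "updated-title"},
            # Optional: restrict the update to metadata produced by this listener.
            "extractor": {"name": "example.extractor", "version": "1.0"},
        }
        async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
            resp = await client.patch(
                f"/datasets/{dataset_id}/metadata",
                json=body,
                headers={"Authorization": f"Bearer {token}"},
            )
            resp.raise_for_status()
            return resp.json()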
@@ -202,7 +202,7 @@ async def update_dataset_metadata( Metadata document that was updated """ if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: query = {"resource.resource_id": ObjectId(dataset_id)} content = metadata_in.content @@ -210,9 +210,9 @@ async def update_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing_md := await db["metadata"].find_one( - {"_id": ObjectId(metadata_in.metadata_id)} - ) + existing_md := await db["metadata"].find_one( + {"_id": ObjectId(metadata_in.metadata_id)} + ) ) is not None: content = await validate_context( db, @@ -232,12 +232,12 @@ async def update_dataset_metadata( # Filter by MetadataAgent if metadata_in.extractor is not None: if ( - listener := await db["listeners"].find_one( - { - "name": metadata_in.extractor.name, - "version": metadata_in.extractor.version, - } - ) + listener := await db["listeners"].find_one( + { + "name": metadata_in.extractor.name, + "version": metadata_in.extractor.version, + } + ) ) is not None: agent = MetadataAgent(creator=user, listener=listener) # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user @@ -263,12 +263,12 @@ async def update_dataset_metadata( @router.get("/{dataset_id}/metadata", response_model=List[MetadataOut]) async def get_dataset_metadata( - dataset_id: str, - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + allow: bool = Depends(Authorization("viewer")), ): # if ( # dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) @@ -287,9 +287,9 @@ async def get_dataset_metadata( md_out = MetadataOut.from_mongo(md) if md_out.definition is not None: if ( - md_def := await db["metadata.definitions"].find_one( - {"name": md_out.definition} - ) + md_def := await db["metadata.definitions"].find_one( + {"name": md_out.definition} + ) ) is not None: md_def = MetadataDefinitionOut(**md_def) md_out.description = md_def.description @@ -301,24 +301,24 @@ async def get_dataset_metadata( @router.delete("/{dataset_id}/metadata", response_model=MetadataOut) async def delete_dataset_metadata( - metadata_in: MetadataDelete, - dataset_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataDelete, + dataset_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(dependencies.get_db), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) ) is not None: # filter by metadata_id or definition query = {"resource.resource_id": ObjectId(dataset_id)} if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - existing_md := 
await db["metadata"].find_one( - {"metadata_id": ObjectId(metadata_in.metadata_id)} - ) + existing_md := await db["metadata"].find_one( + {"metadata_id": ObjectId(metadata_in.metadata_id)} + ) ) is not None: query["metadata_id"] = metadata_in.metadata_id else: @@ -333,9 +333,9 @@ async def delete_dataset_metadata( extractor_info = metadata_in.extractor_info if extractor_info is not None: if ( - extractor := await db["listeners"].find_one( - {"name": extractor_info.name, "version": extractor_info.version} - ) + extractor := await db["listeners"].find_one( + {"name": extractor_info.name, "version": extractor_info.version} + ) ) is not None: agent = MetadataAgent(creator=user, extractor=extractor) # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user From d70b0c1830c5f775a3c56fbce8d22c973b07b683 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Fri, 5 May 2023 10:53:40 -0500 Subject: [PATCH 17/32] add basic Feed support --- backend/app/main.py | 15 ++++++--------- backend/app/models/feeds.py | 12 ++++-------- backend/app/routers/feeds.py | 32 ++++++++++++-------------------- 3 files changed, 22 insertions(+), 37 deletions(-) diff --git a/backend/app/main.py b/backend/app/main.py index acc2aab4b..9e920bbe9 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,21 +1,17 @@ import logging -import random -import string -import time -from urllib.request import Request import uvicorn from beanie import init_beanie -from motor.motor_asyncio import AsyncIOMotorClient -from pydantic import BaseConfig from fastapi import FastAPI, APIRouter, Depends from fastapi.middleware.cors import CORSMiddleware +from motor.motor_asyncio import AsyncIOMotorClient +from pydantic import BaseConfig from app.config import settings +from app.keycloak_auth import get_current_username from app.models.authorization import AuthorizationDB from app.models.datasets import DatasetDB, DatasetDBViewList -from app.search.connect import connect_elasticsearch, create_index -from app.keycloak_auth import get_token, get_current_username +from app.models.feeds import FeedDB from app.routers import ( folders, groups, @@ -39,6 +35,7 @@ # setup loggers # logging.config.fileConfig('logging.conf', disable_existing_loggers=False) from app.search.config import indexSettings +from app.search.connect import connect_elasticsearch, create_index logger = logging.getLogger(__name__) @@ -170,7 +167,7 @@ async def startup_beanie(): await init_beanie( database=getattr(client, settings.MONGO_DATABASE), # Make sure to include all models. If one depends on another that is not in the list it is not clear which one is missing. 
- document_models=[DatasetDB, DatasetDBViewList, AuthorizationDB], + document_models=[DatasetDB, DatasetDBViewList, AuthorizationDB, FeedDB], recreate_views=True, ) diff --git a/backend/app/models/feeds.py b/backend/app/models/feeds.py index d6e5df0a5..f90eb2135 100644 --- a/backend/app/models/feeds.py +++ b/backend/app/models/feeds.py @@ -1,13 +1,12 @@ -from datetime import datetime -from typing import Optional, List +from typing import List import pymongo from beanie import Document -from pydantic import Field, BaseModel +from pydantic import BaseModel +from app.models.authorization import Provenance from app.models.listeners import FeedListener from app.models.search import SearchObject -from app.models.users import UserOut class JobFeed(BaseModel): @@ -27,10 +26,7 @@ class FeedIn(JobFeed): pass -class FeedDB(Document, JobFeed): - creator: Optional[UserOut] = None - updated: datetime = Field(default_factory=datetime.utcnow) - +class FeedDB(Document, JobFeed, Provenance): class Settings: name = "feeds_beanie" indexes = [ diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py index ec738d31e..7c12e53d4 100644 --- a/backend/app/routers/feeds.py +++ b/backend/app/routers/feeds.py @@ -1,13 +1,12 @@ from typing import List, Optional -import pymongo from bson import ObjectId from fastapi import APIRouter, HTTPException, Depends from pika.adapters.blocking_connection import BlockingChannel from pymongo import MongoClient from app.dependencies import get_db -from app.keycloak_auth import get_current_user +from app.keycloak_auth import get_current_user, get_current_username from app.models.feeds import ( FeedIn, FeedDB, @@ -100,15 +99,13 @@ async def check_feed_listeners( @router.post("", response_model=FeedOut) async def save_feed( feed_in: FeedIn, - user=Depends(get_current_user), + user=Depends(get_current_username), db: MongoClient = Depends(get_db), ): """Create a new Feed (i.e. 
saved search) in the database.""" feed = FeedDB(**feed_in.dict(), creator=user) - new_feed = await db["feeds"].insert_one(feed.to_mongo()) - found = await db["feeds"].find_one({"_id": new_feed.inserted_id}) - feed_out = FeedOut.from_mongo(found) - return feed_out + new_feed = await feed.insert() + return await FeedDB.find_one(FeedDB.id == new_feed.id) @router.get("", response_model=List[FeedOut]) @@ -122,26 +119,21 @@ async def get_feeds( """Fetch all existing Feeds.""" feeds = [] if name is not None: - docs = ( - await db["feeds"] - .find({"name": name}) - .sort([("created", pymongo.DESCENDING)]) + return ( + await FeedDB.find(FeedDB.name == name) + .sort(-FeedDB.created) .skip(skip) .limit(limit) .to_list(length=limit) ) else: - docs = ( - await db["feeds"] - .find() - .sort([("created", pymongo.DESCENDING)]) + return ( + await FeedDB.find() + .sort(-FeedDB.created) .skip(skip) .limit(limit) .to_list(length=limit) ) - for doc in docs: - feeds.append(FeedOut.from_mongo(doc)) - return feeds @router.get("/{feed_id}", response_model=FeedOut) @@ -151,8 +143,8 @@ async def get_feed( db: MongoClient = Depends(get_db), ): """Fetch an existing saved search Feed.""" - if (feed := await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: - return FeedOut.from_mongo(feed) + if (feed := await FeedDB.find_one({FeedDB.id == ObjectId(feed_id)})) is not None: + return feed else: raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") From 9418b0238869cb7f52d73b2f659af7d32c83a615 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Fri, 5 May 2023 14:34:51 -0500 Subject: [PATCH 18/32] various updates --- backend/app/main.py | 1 + backend/app/models/authorization.py | 2 +- backend/app/models/feeds.py | 2 +- backend/app/models/listeners.py | 49 +++++++-- backend/app/rabbitmq/listeners.py | 57 +++------- backend/app/rabbitmq/message_listener_sync.py | 31 +++--- backend/app/routers/datasets.py | 2 - backend/app/routers/feeds.py | 104 +++++++----------- backend/app/routers/files.py | 68 ++++-------- backend/app/routers/jobs.py | 28 ++--- backend/app/routers/listeners.py | 51 ++++----- 11 files changed, 167 insertions(+), 228 deletions(-) diff --git a/backend/app/main.py b/backend/app/main.py index 9e920bbe9..0c253cf95 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -167,6 +167,7 @@ async def startup_beanie(): await init_beanie( database=getattr(client, settings.MONGO_DATABASE), # Make sure to include all models. If one depends on another that is not in the list it is not clear which one is missing. 
+ # TODO: autogenerate this list if possible document_models=[DatasetDB, DatasetDBViewList, AuthorizationDB, FeedDB], recreate_views=True, ) diff --git a/backend/app/models/authorization.py b/backend/app/models/authorization.py index 3ebfa7241..cf9e8ea08 100644 --- a/backend/app/models/authorization.py +++ b/backend/app/models/authorization.py @@ -81,4 +81,4 @@ class AuthorizationDB(Document, AuthorizationBase, Provenance): """The creator of the Authorization object should also be the creator of the dataset itself.""" class Settings: - name = "authorization_beanie" + name = "authorization" diff --git a/backend/app/models/feeds.py b/backend/app/models/feeds.py index f90eb2135..7e28b4d40 100644 --- a/backend/app/models/feeds.py +++ b/backend/app/models/feeds.py @@ -28,7 +28,7 @@ class FeedIn(JobFeed): class FeedDB(Document, JobFeed, Provenance): class Settings: - name = "feeds_beanie" + name = "feeds" indexes = [ [ ("name", pymongo.TEXT), diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 3616a8168..808bcf636 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -1,15 +1,18 @@ -from datetime import datetime, timedelta -from pydantic import Field, BaseModel, AnyUrl -from typing import Optional, List, Union +from datetime import datetime from enum import Enum +from typing import Optional, List, Union + +import pymongo +from beanie import Document +from pydantic import Field, BaseModel, AnyUrl from app.config import settings +from app.models.mongomodel import MongoDBRef from app.models.pyobjectid import PyObjectId -from app.models.mongomodel import MongoModel, MongoDBRef from app.models.users import UserOut -class Repository(MongoModel): +class Repository(BaseModel): """Reference to a repository associated with Event Listener/Extractor.""" repository_type: str = "git" @@ -40,7 +43,7 @@ class ExtractorInfo(BaseModel): class EventListenerBase(BaseModel): """An Event Listener is the expanded version of v1 Extractors.""" - author: str = "" + creator: str = "" name: str version: str = "1.0" description: str = "" @@ -60,7 +63,7 @@ class LegacyEventListenerIn(ExtractorInfo): description: str = "" -class EventListenerDB(EventListenerBase, MongoModel): +class EventListenerDB(Document, EventListenerBase): """EventListeners have a name, version, author, description, and optionally properties where extractor_info will be saved.""" creator: Optional[UserOut] = None @@ -68,6 +71,15 @@ class EventListenerDB(EventListenerBase, MongoModel): modified: datetime = Field(default_factory=datetime.now) properties: Optional[ExtractorInfo] = None + class Settings: + name = "listeners" + indexes = [ + [ + ("name", pymongo.TEXT), + ("description", pymongo.TEXT), + ], + ] + class EventListenerOut(EventListenerDB): pass @@ -97,7 +109,7 @@ class EventListenerJobStatus(str, Enum): RESUBMITTED = "RESUBMITTED" -class EventListenerJob(MongoModel): +class EventListenerJob(Document): """This summarizes a submission to an extractor. 
All messages from that extraction should include this job's ID.""" listener_id: str @@ -116,6 +128,16 @@ class Config: # required for Enum to properly work use_enum_values = True + class Settings: + name = "listener_jobs" + indexes = [ + [ + ("resource_ref.resource_id", PyObjectId), + ("listener_id", pymongo.TEXT), + ("status", pymongo.TEXT), + ], + ] + class EventListenerJobMessage(BaseModel): """This describes contents of JSON object that is submitted to RabbitMQ for the Event Listeners/Extractors to consume.""" @@ -146,9 +168,18 @@ class EventListenerDatasetJobMessage(BaseModel): job_id: str -class EventListenerJobUpdate(MongoModel): +class EventListenerJobUpdate(Document): """This is a status update message coming from the extractors back to Clowder.""" job_id: str timestamp: datetime = Field(default_factory=datetime.utcnow) status: str + + class Settings: + name = "listener_job_updates" + indexes = [ + [ + ("job_id", pymongo.TEXT), + ("status", pymongo.TEXT), + ], + ] diff --git a/backend/app/rabbitmq/listeners.py b/backend/app/rabbitmq/listeners.py index b7d0f411f..33543eda5 100644 --- a/backend/app/rabbitmq/listeners.py +++ b/backend/app/rabbitmq/listeners.py @@ -16,6 +16,7 @@ from app.models.users import UserOut from app.models.listeners import ( EventListenerJob, + EventListenerDB, EventListenerJobMessage, EventListenerDatasetJobMessage, ) @@ -56,18 +57,14 @@ async def create_reply_queue(): async def submit_file_job( file_out: FileOut, - queue: str, routing_key: str, parameters: dict, user: UserOut, - db: MongoClient, rabbitmq_client: BlockingChannel, - token: str, + token: str = Depends(get_token), ): - # TODO check if extractor is registered - # Create an entry in job history with unique ID - job = EventListenerJob( + job = EventListenerDB( listener_id=routing_key, creator=user, resource_ref=MongoDBRef( @@ -75,27 +72,17 @@ async def submit_file_job( ), parameters=parameters, ) - new_job = await db["listener_jobs"].insert_one(job.to_mongo()) - new_job_id = str(new_job.inserted_id) - - current_id = file_out.id - current_datasetId = file_out.dataset_id - current_secretKey = str(token) - try: - msg_body = EventListenerJobMessage( - filename=file_out.name, - fileSize=file_out.bytes, - id=str(current_id), - datasetId=str(current_datasetId), - secretKey=current_secretKey, - job_id=new_job_id, - ) - except Exception as e: - print(e) - print(new_job_id) + new_job = await job.save() + msg_body = EventListenerJobMessage( + filename=file_out.name, + fileSize=file_out.bytes, + id=str(file_out.id), + datasetId=str(file_out.dataset_id), + secretKey=token, + job_id=str(new_job.id), + ) reply_to = await create_reply_queue() - rabbitmq_client.basic_publish( exchange="", routing_key=routing_key, @@ -104,48 +91,40 @@ async def submit_file_job( content_type="application/json", delivery_mode=1, reply_to=reply_to ), ) - return new_job_id + return str(new_job.id) async def submit_dataset_job( dataset_out: DatasetOut, - queue: str, routing_key: str, parameters: dict, user: UserOut, token: str = Depends(get_token), - db: MongoClient = Depends(dependencies.get_db), rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), ): - # TODO check if extractor is registered - # Create an entry in job history with unique ID - job = EventListenerJob( + job = EventListenerDB( listener_id=routing_key, creator=user, resource_ref=MongoDBRef(collection="dataset", resource_id=dataset_out.id), parameters=parameters, ) - new_job = await db["listener_jobs"].insert_one(job.to_mongo()) - new_job_id = 
str(new_job.inserted_id) + new_job = await job.save() msg_body = EventListenerDatasetJobMessage( datasetName=dataset_out.name, id=str(dataset_out.id), datasetId=str(dataset_out.id), secretKey=token, - job_id=new_job_id, + job_id=str(new_job.id), ) - reply_to = await create_reply_queue() - rabbitmq_client.basic_publish( exchange="", routing_key=routing_key, body=json.dumps(msg_body.dict(), ensure_ascii=False), properties=pika.BasicProperties( - content_type="application/json", delivery_mode=1 + content_type="application/json", delivery_mode=1, reply_to=reply_to ), - # reply_to=reply_to ) - return new_job_id + return str(new_job.id) diff --git a/backend/app/rabbitmq/message_listener_sync.py b/backend/app/rabbitmq/message_listener_sync.py index b3fcdfab0..5212ecb88 100644 --- a/backend/app/rabbitmq/message_listener_sync.py +++ b/backend/app/rabbitmq/message_listener_sync.py @@ -13,6 +13,7 @@ from app.models.config import ConfigEntryDB, ConfigEntryOut from app.models.listeners import ( EventListenerJob, + EventListenerDB, EventListenerJobUpdate, EventListenerJobStatus, ) @@ -89,45 +90,43 @@ def callback(ch, method, properties, body): # TODO: Updating an event message could go in rabbitmq/listeners # Check if the job exists, and update if so - existing_job = db["listener_jobs"].find_one({"_id": ObjectId(job_id)}) - if existing_job is not None: + job = EventListenerDB.find_one(EventListenerDB.id == ObjectId(job_id)) + if job: # Update existing job with newest info - updated_job = EventListenerJob.from_mongo(existing_job) - updated_job.updated = timestamp + job.updated = timestamp parsed = parse_message_status(message_str) status = parsed["status"] cleaned_msg = parsed["cleaned_msg"] # Update the job timestamps/duration depending on what status we received update_duration = False - if status == EventListenerJobStatus.STARTED and updated_job.started is None: - updated_job.started = timestamp + if status == EventListenerJobStatus.STARTED and job.started is None: + job.started = timestamp elif ( status == EventListenerJobStatus.SUCCEEDED or status == EventListenerJobStatus.ERROR or status == EventListenerJobStatus.SKIPPED ): - updated_job.finished = timestamp + job.finished = timestamp update_duration = True elif ( status == EventListenerJobStatus.PROCESSING or status == EventListenerJobStatus.RESUBMITTED ): - updated_job.updated = timestamp + job.updated = timestamp update_duration = True - if update_duration and updated_job.started: - updated_job.duration = (timestamp - updated_job.started).total_seconds() - updated_job.status = status - updated_job.latest_message = cleaned_msg - db["listener_jobs"].replace_one( - {"_id": ObjectId(job_id)}, updated_job.to_mongo() - ) + if update_duration and job.started: + job.duration = (timestamp - job.started).total_seconds() + job.status = status + job.latest_message = cleaned_msg + # TODO: works if synchronous? + job.save() # Add latest message to the job updates event_msg = EventListenerJobUpdate( job_id=job_id, status=cleaned_msg, timestamp=timestamp ) - db["listener_job_updates"].insert_one(event_msg.to_mongo()) + event_msg.save() return True else: # We don't know what this job is. Reject the message. 
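The callback above folds each extractor message into the job record; its timestamp bookkeeping reduces to a small state machine over the EventListenerJobStatus values. A standalone sketch of that logic (the JobTimes container is illustrative; the status names and the duration rule mirror the diff):

    from dataclasses import dataclass
    from datetime import datetime
    from typing import Optional

    @dataclass
    class JobTimes:
        started: Optional[datetime] = None
        updated: Optional[datetime] = None
        finished: Optional[datetime] = None
        duration: Optional[float] = None

    def apply_status(job: JobTimes, status: str, ts: datetime) -> JobTimes:
        # Every message refreshes the last-updated timestamp.
        job.updated = ts
        if status == "STARTED" and job.started is None:
            # The first STARTED message pins the start time.
            job.started = ts
        elif status in ("SUCCEEDED", "ERROR", "SKIPPED"):
            # Terminal states pin the finish time and close out the duration.
            job.finished = ts
            if job.started:
                job.duration = (ts - job.started).total_seconds()
        elif status in ("PROCESSING", "RESUBMITTED"):
            # Intermediate states keep the running duration fresh.
            if job.started:
                job.duration = (ts - job.started).total_seconds()
        return job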
diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index a4c1f16dc..5d7a6406e 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -997,12 +997,10 @@ async def get_dataset_extract( job_id = await submit_dataset_job( dataset_out, - current_queue, current_routing_key, parameters, user, access_token, - db, rabbitmq_client, ) diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py index 7c12e53d4..0e4034888 100644 --- a/backend/app/routers/feeds.py +++ b/backend/app/routers/feeds.py @@ -4,6 +4,7 @@ from fastapi import APIRouter, HTTPException, Depends from pika.adapters.blocking_connection import BlockingChannel from pymongo import MongoClient +from beanie.operators import NE from app.dependencies import get_db from app.keycloak_auth import get_current_user, get_current_username @@ -15,6 +16,7 @@ from app.models.files import FileOut from app.models.listeners import ( FeedListener, + EventListenerDB, EventListenerOut, ) from app.models.users import UserOut @@ -25,23 +27,19 @@ # TODO: Move this to MongoDB middle layer -async def disassociate_listener_db(feed_id: str, listener_id: str, db: MongoClient): +async def disassociate_listener_db(feed_id: str, listener_id: str): """Remove a specific Event Listener from a feed. Does not delete either resource, just removes relationship. This actually performs the database operations, and can be used by any endpoints that need this functionality. """ - async for feed in db["feeds"].find( - {"listeners.listener_id": ObjectId(listener_id)} - ): - feed_db = FeedDB.from_mongo(feed) + feed = FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) + if feed: new_listeners = [] - for feed_listener in feed_db.listeners: + for feed_listener in feed.listeners: if feed_listener.listener_id != listener_id: new_listeners.append(feed_listener) - feed_db.listeners = new_listeners - await db["feeds"].replace_one( - {"_id": ObjectId(feed_id)}, FeedDB(**feed_db).to_mongo() - ) + feed.listeners = new_listeners + await feed.save() async def check_feed_listeners( @@ -54,45 +52,29 @@ async def check_feed_listeners( ): """Automatically submit new file to listeners on feeds that fit the search criteria.""" listeners_found = [] - async for feed in db["feeds"].find({"listeners": {"$ne": []}}): - feed_db = FeedDB(**feed) - - # If feed doesn't have any auto-triggering listeners, we're done - found_auto = False - for listener in feed_db.listeners: - if listener.automatic: - found_auto = True - break - - if found_auto: + feeds = await FeedDB.find(NE(FeedDB.listeners, [])) + for feed in feeds: + # Only proceed if feed actually has auto-triggering listeners + if any(map(lambda li: li.automatic, feed.listeners)): # Verify whether resource_id is found when searching the specified criteria - feed_match = check_search_result(es_client, file_out, feed_db.search) + feed_match = check_search_result(es_client, file_out, feed.search) if feed_match: - for listener in feed_db.listeners: + for listener in feed.listeners: if listener.automatic: listeners_found.append(listener.listener_id) - for targ_listener in listeners_found: - if ( - listener_db := await db["listeners"].find_one( - {"_id": ObjectId(targ_listener)} - ) - ) is not None: - listener_info = EventListenerOut.from_mongo(listener_db) - queue = listener_info.name - routing_key = listener_info.name - parameters = {} + listener_info = EventListenerDB.find( + EventListenerDB.id == ObjectId(targ_listener) + ) + if listener_info: await submit_file_job( file_out, - queue, - 
routing_key, - parameters, + listener_info.name, # routing_key + {}, # parameters user, - db, rabbitmq_client, token, ) - return listeners_found @@ -100,24 +82,20 @@ async def check_feed_listeners( async def save_feed( feed_in: FeedIn, user=Depends(get_current_username), - db: MongoClient = Depends(get_db), ): """Create a new Feed (i.e. saved search) in the database.""" feed = FeedDB(**feed_in.dict(), creator=user) - new_feed = await feed.insert() - return await FeedDB.find_one(FeedDB.id == new_feed.id) + return await feed.save() @router.get("", response_model=List[FeedOut]) async def get_feeds( name: Optional[str] = None, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), skip: int = 0, limit: int = 10, ): """Fetch all existing Feeds.""" - feeds = [] if name is not None: return ( await FeedDB.find(FeedDB.name == name) @@ -140,10 +118,10 @@ async def get_feeds( async def get_feed( feed_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), ): """Fetch an existing saved search Feed.""" - if (feed := await FeedDB.find_one({FeedDB.id == ObjectId(feed_id)})) is not None: + feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) + if feed: return feed else: raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") @@ -153,14 +131,13 @@ async def get_feed( async def delete_feed( feed_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), ): """Delete an existing saved search Feed.""" - if (await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: - await db["feeds"].delete_one({"_id": ObjectId(feed_id)}) + feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) + if feed: + await FeedDB.delete(FeedDB.id == ObjectId(feed_id)) return {"deleted": feed_id} - else: - raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") + raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") @router.post("/{feed_id}/listeners", response_model=FeedOut) @@ -168,7 +145,6 @@ async def associate_listener( feed_id: str, listener: FeedListener, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), ): """Associate an existing Event Listener with a Feed, e.g. so it will be triggered on new Feed results. @@ -176,18 +152,14 @@ async def associate_listener( feed_id: Feed that should have new Event Listener associated listener: JSON object with "listener_id" field and "automatic" bool field (whether to auto-trigger on new data) """ - if (feed := await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: - feed_out = FeedOut.from_mongo(feed) - if ( - listener_q := await db["listeners"].find_one( - {"_id": ObjectId(listener.listener_id)} - ) - ) is not None: - feed_out.listeners.append(listener) - await db["feeds"].replace_one( - {"_id": ObjectId(feed_id)}, FeedDB(**feed_out.dict()).to_mongo() - ) - return feed_out + feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) + if feed: + exists = await EventListenerDB.find_one( + EventListenerDB.id == ObjectId(listener.listener_id) + ) + if exists: + feed.listeners.append(listener) + return await feed.save() raise HTTPException( status_code=404, detail=f"listener {listener.listener_id} not found" ) @@ -199,7 +171,6 @@ async def disassociate_listener( feed_id: str, listener_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), ): """Disassociate an Event Listener from a Feed. 
@@ -207,7 +178,8 @@ async def disassociate_listener( feed_id: UUID of search Feed that is being changed listener_id: UUID of Event Listener that should be disassociated """ - if (feed := await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: - disassociate_listener_db(feed_id, listener_id, db) + feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) + if feed: + await disassociate_listener_db(feed_id, listener_id) return {"disassociated": listener_id} raise HTTPException(status_code=404, detail=f"feed {feed_id} not found") diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 3cc540bb0..c2bcd9a1c 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -1,44 +1,37 @@ import io -import json - -from elasticsearch import Elasticsearch -import pika import mimetypes from datetime import datetime -from typing import Optional, List, BinaryIO +from typing import Optional, List +from typing import Union + from bson import ObjectId +from elasticsearch import Elasticsearch +from fastapi import APIRouter, HTTPException, Depends, Security from fastapi import ( - APIRouter, - HTTPException, - Depends, File, - Form, UploadFile, Request, ) -from fastapi import APIRouter, HTTPException, Depends, Security -from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer from fastapi.responses import StreamingResponse +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer from minio import Minio from pika.adapters.blocking_connection import BlockingChannel from pymongo import MongoClient from app import dependencies -from app.deps.authorization_deps import FileAuthorization from app.config import settings +from app.deps.authorization_deps import FileAuthorization +from app.keycloak_auth import get_current_user, get_token +from app.models.files import FileOut, FileVersion, FileContentType, FileDB +from app.models.users import UserOut +from app.rabbitmq.listeners import submit_file_job, EventListenerJob +from app.routers.feeds import check_feed_listeners from app.search.connect import ( insert_record, delete_document_by_id, update_record, delete_document_by_query, ) -from app.models.files import FileIn, FileOut, FileVersion, FileContentType, FileDB -from app.models.users import UserOut -from app.routers.feeds import check_feed_listeners -from app.keycloak_auth import get_user, get_current_user, get_token -from app.rabbitmq.listeners import submit_file_job, submit_file_job -from typing import Union -from app.models.metadata import MetadataOut router = APIRouter() security = HTTPBearer() @@ -62,32 +55,21 @@ async def _resubmit_file_extractors( rabbitmq_client: Rabbitmq Client """ - previous_version = file.version_num - 1 - query = { - "resource_ref.resource_id": ObjectId(file.id), - "resource_ref.version": previous_version, - } - listeners_resubmitted = [] - listeners_resubitted_failed = [] resubmitted_jobs = [] - async for job in db["listener_jobs"].find(query): - current_job = job - job_listener_queue = job["listener_id"] - job_parameters = job["parameters"] - resubmitted_job = { - "listener_id": job_listener_queue, - "parameters": job_parameters, - } + jobs = await EventListenerJob.find( + EventListenerJob.resource_ref.resource_id == ObjectId(file.id), + EventListenerJob.resource_ref.version == file.version_num - 1, + ) + for job in jobs: + resubmitted_job = {"listener_id": job.listener_id, "parameters": job.parameters} try: - routing_key = job_listener_queue + routing_key = job.listener_id access_token = 
credentials.credentials await submit_file_job( file, - job_listener_queue, routing_key, - job_parameters, + job.parameters, user, - db, rabbitmq_client, access_token, ) @@ -96,7 +78,6 @@ async def _resubmit_file_extractors( except Exception as e: resubmitted_job["status"] = "error" resubmitted_jobs.append(resubmitted_job) - return resubmitted_jobs @@ -404,7 +385,6 @@ async def get_file_versions( # submits file to extractor -# can handle parameters pass in as key/values in info @router.post("/{file_id}/extract") async def get_file_extract( file_id: str, @@ -427,22 +407,16 @@ async def get_file_extract( # backward compatibility? Get extractor info from request (Clowder v1) queue = extractorName routing_key = queue - if parameters is None: parameters = {} - - job_id = await submit_file_job( + return await submit_file_job( file_out, - queue, routing_key, parameters, user, - db, rabbitmq_client, access_token, ) - - return job_id else: raise HTTPException(status_code=404, detail=f"File {file_id} not found") diff --git a/backend/app/routers/jobs.py b/backend/app/routers/jobs.py index ece605218..0df624f86 100644 --- a/backend/app/routers/jobs.py +++ b/backend/app/routers/jobs.py @@ -8,7 +8,7 @@ from app import dependencies from app.models.listeners import EventListenerJob, EventListenerJobUpdate -from app.keycloak_auth import get_current_user, get_user +from app.keycloak_auth import get_current_user, get_user, get_current_username router = APIRouter() @@ -86,29 +86,23 @@ async def get_all_job_summary( @router.get("/{job_id}/summary", response_model=EventListenerJob) async def get_job_summary( job_id: str, - db: MongoClient = Depends(dependencies.get_db), + user=Depends(get_current_username), ): - if ( - job := await db["listener_jobs"].find_one({"_id": ObjectId(job_id)}) - ) is not None: - return EventListenerJob.from_mongo(job) - + job = await EventListenerJob.find_one(EventListenerJob.id == ObjectId(job_id)) + if job: + return job raise HTTPException(status_code=404, detail=f"Job {job_id} not found") @router.get("/{job_id}/updates") async def get_job_updates( job_id: str, - db: MongoClient = Depends(dependencies.get_db), + user=Depends(get_current_username), ): - if ( - job := await db["listener_jobs"].find_one({"_id": ObjectId(job_id)}) - ) is not None: + job = await EventListenerJob.find_one(EventListenerJob.id == ObjectId(job_id)) + if job: # TODO: Should this also return the job summary data since we just queried it here? 
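A note on the Beanie query API used below: find() returns a FindMany query object, not a coroutine of results, so the query has to be materialized with .to_list() (or consumed with async for) before FastAPI can serialize it; the FeedDB.find(NE(FeedDB.listeners, [])) call in check_feed_listeners above has the same requirement. A minimal sketch, assuming Beanie 1.x:

    # Materialize the job updates before returning them.
    updates = await EventListenerJobUpdate.find(
        EventListenerJobUpdate.job_id == job_id
    ).to_list()
    return updates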
-        events = []
-        async for update in db["listener_job_updates"].find({"job_id": job_id}):
-            event_json = EventListenerJobUpdate.from_mongo(update)
-            events.append(event_json)
-        return events
-
+        return await EventListenerJobUpdate.find(
+            EventListenerJobUpdate.job_id == job_id
+        ).to_list()
     raise HTTPException(status_code=404, detail=f"Job {job_id} not found")
diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py
index cba4b5485..d30509c71 100644
--- a/backend/app/routers/listeners.py
+++ b/backend/app/routers/listeners.py
@@ -1,19 +1,19 @@
 import datetime
 import os
-import re
 import random
+import re
 import string
-from packaging import version
 from typing import List, Optional
+
 from bson import ObjectId
 from fastapi import APIRouter, HTTPException, Depends
+from packaging import version
 from pymongo import MongoClient
 
 from app.dependencies import get_db
-from app.keycloak_auth import get_user, get_current_user
-from app.models.feeds import FeedDB, FeedOut, FeedListener
+from app.keycloak_auth import get_user, get_current_user, get_current_username
 from app.models.config import ConfigEntryDB, ConfigEntryOut
-from app.models.search import SearchCriteria
+from app.models.feeds import FeedDB, FeedListener
 from app.models.listeners import (
     ExtractorInfo,
     EventListenerIn,
@@ -21,6 +21,7 @@
     EventListenerDB,
     EventListenerOut,
 )
+from app.models.search import SearchCriteria
 from app.routers.feeds import disassociate_listener_db
 
 router = APIRouter()
@@ -242,7 +243,6 @@ async def get_listeners(
     category -- filter by category has to be exact match
     label -- filter by label has to be exact match
     """
-    listeners = []
     if category and label:
         query = {
             "$and": [
@@ -257,11 +257,9 @@
     else:
         query = {}
 
-    for doc in (
-        await db["listeners"].find(query).skip(skip).limit(limit).to_list(length=limit)
-    ):
-        listeners.append(EventListenerOut.from_mongo(doc))
-    return listeners
+    return (
+        await EventListenerDB.find(query).skip(skip).limit(limit).to_list(length=limit)
+    )
 
 
 @router.put("/{listener_id}", response_model=EventListenerOut)
@@ -277,38 +275,31 @@ async def edit_listener(
     listener_id -- UUID of the listener to be updated
     listener_in -- JSON object including updated information
     """
-    if (
-        listener := await db["listeners"].find_one({"_id": ObjectId(listener_id)})
-    ) is not None:
+    listener = await EventListenerDB.find_one(
+        EventListenerDB.id == ObjectId(listener_id)
+    )
+    if listener:
         # TODO: Refactor this with permissions checks etc.
         listener_update = dict(listener_in) if listener_in is not None else {}
-        user = await db["users"].find_one({"_id": ObjectId(user_id)})
-        listener_update["updated"] = datetime.datetime.utcnow()
+        listener_update["modified"] = datetime.datetime.utcnow()
         try:
-            listener.update(listener_update)
-            await db["listeners"].replace_one(
-                {"_id": ObjectId(listener_id)}, EventListenerDB(**listener).to_mongo()
-            )
+            listener = listener.copy(update=listener_update)
+            return await listener.save()
         except Exception as e:
             raise HTTPException(status_code=500, detail=e.args[0])
-        return EventListenerOut.from_mongo(listener)
     raise HTTPException(status_code=404, detail=f"listener {listener_id} not found")
 
 
 @router.delete("/{listener_id}")
 async def delete_listener(
     listener_id: str,
-    db: MongoClient = Depends(get_db),
+    user=Depends(get_current_username),
 ):
     """Remove an Event Listener from the database.
Will not clear event history for the listener.""" - if (await db["listeners"].find_one({"_id": ObjectId(listener_id)})) is not None: + listener = EventListenerDB.find(EventListenerDB.id == ObjectId(listener_id)) + if listener: # unsubscribe the listener from any feeds - async for feed in db["feeds"].find( - {"listeners.listener_id": ObjectId(listener_id)} - ): - feed_out = FeedOut.from_mongo(feed) - disassociate_listener_db(feed_out.id, listener_id, db) - await db["listeners"].delete_one({"_id": ObjectId(listener_id)}) + feeds = FeedDB.find(FeedDB.listeners.listener_id == ObjectId(listener_id)) + for feed in feeds: + await disassociate_listener_db(feed.id, listener_id) + await listener.delete() return {"deleted": listener_id} - else: - raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") + raise HTTPException(status_code=404, detail=f"Listener {listener_id} not found") From 3360c6a2223f714e3af614497b11b6c50905eb35 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 09:24:59 -0500 Subject: [PATCH 19/32] introduce listener views --- backend/app/models/datasets.py | 6 +- backend/app/models/listeners.py | 159 +++++++++++++++++- .../app/rabbitmq/heartbeat_listener_sync.py | 4 +- backend/app/routers/jobs.py | 19 +-- backend/app/routers/listeners.py | 115 +++++-------- 5 files changed, 210 insertions(+), 93 deletions(-) diff --git a/backend/app/models/datasets.py b/backend/app/models/datasets.py index b17218927..70fa80960 100644 --- a/backend/app/models/datasets.py +++ b/backend/app/models/datasets.py @@ -46,7 +46,7 @@ class DatasetDB(Document, DatasetBase): downloads: int = 0 class Settings: - name = "datasets_beanie" + name = "datasets" indexes = [ [ ("name", pymongo.TEXT), @@ -58,14 +58,14 @@ class Settings: class DatasetDBViewList(View, DatasetBase): # FIXME This seems to be required to return _id. Otherwise _id is null in the response. id: PydanticObjectId = Field(None, alias="_id") - author: UserOut + creator: UserOut created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) auth: List[AuthorizationDB] class Settings: source = DatasetDB - name = "datasets_beanie_view" + name = "datasets_view" pipeline = [ { "$lookup": { diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 808bcf636..1e98bb10c 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -3,12 +3,13 @@ from typing import Optional, List, Union import pymongo -from beanie import Document +from beanie import Document, View, PydanticObjectId from pydantic import Field, BaseModel, AnyUrl from app.config import settings from app.models.mongomodel import MongoDBRef from app.models.pyobjectid import PyObjectId +from app.models.authorization import AuthorizationDB from app.models.users import UserOut @@ -183,3 +184,159 @@ class Settings: ("status", pymongo.TEXT), ], ] + + +class EventListenerJobViewList(View, EventListenerJob): + """Get associated resource information for each job""" + + # FIXME This seems to be required to return _id. Otherwise _id is null in the response. 
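The FIXME above reflects how Beanie views behave: a View is backed by an aggregation pipeline rather than a stored Document, so the record's _id seems to need re-exposing explicitly through a pydantic alias (the id field that follows). A hedged usage sketch, assuming a Beanie release with view support (1.17+):

    # Inside the startup coroutine: views are registered like documents, and
    # recreate_views rebuilds them so pipeline changes take effect.
    from beanie import init_beanie
    await init_beanie(
        database=db,
        document_models=[EventListenerJob, EventListenerJobViewList],
        recreate_views=True,
    )

    # Queried like a collection; the $lookup stages attach the auth records.
    jobs = await EventListenerJobViewList.find({}).skip(0).limit(10).to_list()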
+ id: PydanticObjectId = Field(None, alias="_id") + creator: UserOut + created: datetime = Field(default_factory=datetime.utcnow) + modified: datetime = Field(default_factory=datetime.utcnow) + auth: List[AuthorizationDB] + + class Settings: + source = EventListenerJob + name = "listener_jobs_view" + pipeline = [ + { + "$facet": { + "extraction_on_dataset": [ + {"$match": {"resource_ref.collection": {"$eq": "dataset"}}}, + { + "$lookup": { + "from": "authorization", + "localField": "resource_ref.resource_id", + "foreignField": "dataset_id", + "as": "auth", + } + }, + ], + "extraction_on_file": [ + {"$match": {"resource_ref.collection": {"$eq": "file"}}}, + { + "$lookup": { + "from": "files", + "localField": "resource_ref.resource_id", + "foreignField": "_id", + "as": "file_details", + } + }, + { + "$lookup": { + "from": "authorization", + "localField": "file_details.dataset_id", + "foreignField": "dataset_id", + "as": "auth", + } + }, + ], + } + }, + { + "$project": { + "all": { + "$concatArrays": [ + "$extraction_on_dataset", + "$extraction_on_file", + ] + } + } + }, + {"$unwind": "$all"}, + {"$replaceRoot": {"newRoot": "$all"}}, + ] + # Needs fix to work https://github.com/roman-right/beanie/pull/521 + # use_cache = True + # cache_expiration_time = timedelta(seconds=10) + # cache_capacity = 5 + + +class EventListenerJobUpdateViewList(View, EventListenerJob): + """Get associated resource information for each job update""" + + # FIXME This seems to be required to return _id. Otherwise _id is null in the response. + id: PydanticObjectId = Field(None, alias="_id") + creator: UserOut + created: datetime = Field(default_factory=datetime.utcnow) + modified: datetime = Field(default_factory=datetime.utcnow) + auth: List[AuthorizationDB] + + class Settings: + source = EventListenerJob + name = "listener_jobs_view" + pipeline = ( + [ + { + "$lookup": { # Equality Match + "from": "listener_jobs", + "localField": "job_id", + "foreignField": "_id", + "as": "listener_job_details", + } + }, + { + "$facet": { + "extraction_on_dataset": [ + { + "$match": { + "listener_job_details.resource_ref.collection": { + "$eq": "dataset" + } + } + }, + { + "$lookup": { + "from": "authorization", + "localField": "listener_job_details.resource_ref.resource_id", + "foreignField": "dataset_id", + "as": "auth", + } + }, + ], + "extraction_on_file": [ + { + "$match": { + "listener_job_details.resource_ref.collection": { + "$eq": "file" + } + } + }, + { + "$lookup": { + "from": "files", + "localField": "listener_job_details.resource_ref.resource_id", + "foreignField": "_id", + "as": "file_details", + } + }, + { + "$lookup": { + "from": "authorization", + "localField": "file_details.dataset_id", + "foreignField": "dataset_id", + "as": "auth", + } + }, + ], + } + }, + { + "$project": { + "all": { + "$concatArrays": [ + "$extraction_on_dataset", + "$extraction_on_file", + ] + } + } + }, + {"$unwind": "$all"}, + {"$replaceRoot": {"newRoot": "$all"}}, + ], + ) + # Needs fix to work https://github.com/roman-right/beanie/pull/521 + # use_cache = True + # cache_expiration_time = timedelta(seconds=10) + # cache_capacity = 5 diff --git a/backend/app/rabbitmq/heartbeat_listener_sync.py b/backend/app/rabbitmq/heartbeat_listener_sync.py index c99bdba7b..2a6a565e4 100644 --- a/backend/app/rabbitmq/heartbeat_listener_sync.py +++ b/backend/app/rabbitmq/heartbeat_listener_sync.py @@ -60,8 +60,8 @@ def callback(ch, method, properties, body): # Assign MIME-based listener if needed if extractor_out.properties and 
extractor_out.properties.process: process = extractor_out.properties.process - processed_feed = _process_incoming_v1_extractor_info( - extractor_name, extractor_out.id, process, db + processed_feed = await _process_incoming_v1_extractor_info( + extractor_name, extractor_out.id, process ) db["feeds"].insert_one(processed_feed) diff --git a/backend/app/routers/jobs.py b/backend/app/routers/jobs.py index 0df624f86..85da839f3 100644 --- a/backend/app/routers/jobs.py +++ b/backend/app/routers/jobs.py @@ -7,7 +7,11 @@ from pymongo import MongoClient from app import dependencies -from app.models.listeners import EventListenerJob, EventListenerJobUpdate +from app.models.listeners import ( + EventListenerJob, + EventListenerJobUpdate, + EventListenerJobViewList, +) from app.keycloak_auth import get_current_user, get_user, get_current_username router = APIRouter() @@ -16,7 +20,6 @@ @router.get("", response_model=List[EventListenerJob]) async def get_all_job_summary( current_user_id=Depends(get_user), - db: MongoClient = Depends(dependencies.get_db), listener_id: Optional[str] = None, status: Optional[str] = None, user_id: Optional[str] = None, @@ -38,7 +41,6 @@ async def get_all_job_summary( skip -- number of initial records to skip (i.e. for pagination) limit -- restrict number of records to be returned (i.e. for pagination) """ - jobs = [] filters = [ { "$or": [ @@ -70,17 +72,12 @@ async def get_all_job_summary( filters.append({"resource_ref.collection": "dataset"}) filters.append({"resource_ref.resource_id": ObjectId(dataset_id)}) - query = {"$and": filters} - - for doc in ( - await db["listener_jobs_view"] - .find(query) + return ( + await EventListenerJobViewList.find({"$and": filters}) .skip(skip) .limit(limit) .to_list(length=limit) - ): - jobs.append(EventListenerJob.from_mongo(doc)) - return jobs + ) @router.get("/{job_id}/summary", response_model=EventListenerJob) diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py index d30509c71..9998e1a70 100644 --- a/backend/app/routers/listeners.py +++ b/backend/app/routers/listeners.py @@ -30,11 +30,10 @@ clowder_bucket = os.getenv("MINIO_BUCKET_NAME", "clowder") -def _process_incoming_v1_extractor_info( +async def _process_incoming_v1_extractor_info( extractor_name: str, extractor_id: str, process: dict, - db: MongoClient, ): if "file" in process: # Create a MIME-based feed for this v1 extractor @@ -66,8 +65,8 @@ def _process_incoming_v1_extractor_info( }, listeners=[FeedListener(listener_id=extractor_id, automatic=True)], ) - return new_feed.to_mongo() - db["feeds"].insert_one(new_feed.to_mongo()) + await new_feed.save() + return new_feed @router.get("/instance") @@ -101,22 +100,18 @@ async def get_instance_id( async def save_listener( listener_in: EventListenerIn, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), ): """Register a new Event Listener with the system.""" listener = EventListenerDB(**listener_in.dict(), creator=user) # TODO: Check for duplicates somehow? 
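One hedged way to address the TODO above, reusing the (name, version) identity that the legacy endpoint below already relies on (illustrative only, not part of this patch):

    existing = await EventListenerDB.find_one(
        EventListenerDB.name == listener_in.name,
        EventListenerDB.version == listener.version,
    )
    if existing is not None:
        raise HTTPException(
            status_code=409,
            detail=f"Listener {listener_in.name} v{listener.version} already exists",
        )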
- new_listener = await db["listeners"].insert_one(listener.to_mongo()) - found = await db["listeners"].find_one({"_id": new_listener.inserted_id}) - listener_out = EventListenerOut.from_mongo(found) - return listener_out + await listener.save() + return listener @legacy_router.post("", response_model=EventListenerOut) async def save_legacy_listener( legacy_in: LegacyEventListenerIn, user=Depends(get_current_user), - db: MongoClient = Depends(get_db), ): """This will take a POST with Clowder v1 extractor_info included, and convert/update to a v2 Listener.""" listener_properties = ExtractorInfo(**legacy_in.dict()) @@ -128,48 +123,32 @@ async def save_legacy_listener( properties=listener_properties, ) - # check to see if extractor already exists and update if so - existing_extractor = await db["listeners"].find_one({"name": legacy_in.name}) - if existing_extractor is not None: - # Update existing listener - extractor_out = EventListenerOut.from_mongo(existing_extractor) - existing_version = extractor_out.version - new_version = listener.version - if version.parse(new_version) > version.parse(existing_version): - # if this is a new version, add it to the database - new_extractor = await db["listeners"].insert_one(listener.to_mongo()) - found = await db["listeners"].find_one({"_id": new_extractor.inserted_id}) - # TODO - for now we are not deleting an older version of the extractor, just adding a new one - # removed = db["listeners"].delete_one({"_id": existing_extractor["_id"]}) - extractor_out = EventListenerOut.from_mongo(found) - return extractor_out + # check to see if extractor already exists and update if so, otherwise return existing + existing = await EventListenerDB.find_one(EventListenerDB.name == legacy_in.name) + if existing: + # if this is a new version, add it to the database, otherwise update existing + if version.parse(listener.version) > version.parse(existing.version): + await listener.save() + # TODO: Should older extractor version entries be deleted? + # await EventListenerDB.delete(EventListenerDB.id == existing.id) + return listener else: - # otherwise return existing version # TODO: Should this fail the POST instead? - return extractor_out + return existing else: # Register new listener - new_listener = await db["listeners"].insert_one(listener.to_mongo()) - found = await db["listeners"].find_one({"_id": new_listener.inserted_id}) - listener_out = EventListenerOut.from_mongo(found) - - # Assign MIME-based listener if needed - if listener_out.properties and listener_out.properties.process: - process = listener_out.properties.process - processed_feed = _process_incoming_v1_extractor_info( - legacy_in.name, listener_out.id, process, db + await listener.save() + # Assign a MIME-based listener if necessary + if listener.properties and listener.properties.process: + await _process_incoming_v1_extractor_info( + legacy_in.name, listener.id, listener.properties.process ) - await db["feeds"].insert_one(processed_feed) - - return listener_out + return listener @router.get("/search", response_model=List[EventListenerOut]) async def search_listeners( - db: MongoClient = Depends(get_db), - text: str = "", - skip: int = 0, - limit: int = 2, + text: str = "", skip: int = 0, limit: int = 2, user=Depends(get_current_username) ): """Search all Event Listeners in the db based on text. @@ -178,58 +157,42 @@ async def search_listeners( skip -- number of initial records to skip (i.e. for pagination) limit -- restrict number of records to be returned (i.e. 
for pagination) """ - listeners = [] - query_regx = re.compile(text, re.IGNORECASE) - - for doc in ( - # TODO either use regex or index search - await db["listeners"] - .find({"$or": [{"name": query_regx}, {"description": query_regx}]}) + # TODO either use regex or index search + return ( + await EventListenerDB.find( + {"$or": [{"name": query_regx}, {"description": query_regx}]} + ) .skip(skip) .limit(limit) .to_list(length=limit) - ): - listeners.append(EventListenerOut.from_mongo(doc)) - return listeners + ) @router.get("/categories", response_model=List[str]) -async def list_categories( - db: MongoClient = Depends(get_db), -): - """Get all the distinct categories of registered listeners in the db - - Arguments: - """ - return await db["listeners"].distinct("properties.categories") +async def list_categories(user=Depends(get_current_username)): + """Get all the distinct categories of registered listeners in the db""" + return await EventListenerDB.distinct(EventListenerDB.properties.categories) @router.get("/defaultLabels", response_model=List[str]) -async def list_default_labels( - db: MongoClient = Depends(get_db), -): - """Get all the distinct default labels of registered listeners in the db - - Arguments: - """ - return await db["listeners"].distinct("properties.defaultLabels") +async def list_default_labels(user=Depends(get_current_username)): + """Get all the distinct default labels of registered listeners in the db""" + return await EventListenerDB.distinct(EventListenerDB.properties.default_labels) @router.get("/{listener_id}", response_model=EventListenerOut) -async def get_listener(listener_id: str, db: MongoClient = Depends(get_db)): +async def get_listener(listener_id: str, user=Depends(get_current_username)): """Return JSON information about an Event Listener if it exists.""" - if ( - listener := await db["listeners"].find_one({"_id": ObjectId(listener_id)}) - ) is not None: - return EventListenerOut.from_mongo(listener) + listener = EventListenerDB.find_one(EventListenerDB.id == ObjectId(listener_id)) + if listener: + return listener raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") @router.get("", response_model=List[EventListenerOut]) async def get_listeners( - user_id=Depends(get_user), - db: MongoClient = Depends(get_db), + user_id=Depends(get_current_username), skip: int = 0, limit: int = 2, category: Optional[str] = None, From 838506c9cfc30a262a675232159f4fbb1a0e1e2c Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 09:34:01 -0500 Subject: [PATCH 20/32] update config model --- backend/app/models/config.py | 4 ++-- backend/app/routers/listeners.py | 15 ++++----------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/backend/app/models/config.py b/backend/app/models/config.py index 41927eb41..d6a105162 100644 --- a/backend/app/models/config.py +++ b/backend/app/models/config.py @@ -1,7 +1,7 @@ -from app.models.mongomodel import MongoModel +from beanie import Document -class ConfigEntryBase(MongoModel): +class ConfigEntryBase(Document): key: str value: str diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py index 9998e1a70..5337d535e 100644 --- a/backend/app/routers/listeners.py +++ b/backend/app/routers/listeners.py @@ -74,13 +74,9 @@ async def get_instance_id( user=Depends(get_current_user), db: MongoClient = Depends(get_db), ): - # Check all connection and abort if any one of them is not available - if db is None: - raise HTTPException(status_code=503, detail="Service not 
available") - return - - if (instance_id := await db["config"].find_one({"key": "instance_id"})) is not None: - return ConfigEntryOut.from_mongo(instance_id).value + instance_id = await ConfigEntryDB.find_one({ConfigEntryDB.key == "instance_id"}) + if instance_id: + return instance_id.value else: # If no ID has been generated for this instance, generate a 10-digit alphanumeric identifier instance_id = "".join( @@ -90,10 +86,7 @@ async def get_instance_id( for _ in range(10) ) config_entry = ConfigEntryDB(key="instance_id", value=instance_id) - await db["config"].insert_one(config_entry.to_mongo()) - found = await db["config"].find_one({"key": "instance_id"}) - new_entry = ConfigEntryOut.from_mongo(found) - return instance_id + return await config_entry.save() @router.post("", response_model=EventListenerOut) From e6c9703bc97de77257ea5ee5b0d3f27e44c5e728 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 10:58:29 -0500 Subject: [PATCH 21/32] More updates to metadata models --- backend/app/deps/authorization_deps.py | 34 ++-- backend/app/models/config.py | 9 ++ backend/app/models/metadata.py | 103 ++++-------- backend/app/routers/datasets.py | 69 +++----- backend/app/routers/metadata.py | 95 +++++------ backend/app/routers/metadata_datasets.py | 194 ++++++++++------------- backend/app/routers/metadata_files.py | 177 ++++++++++----------- 7 files changed, 290 insertions(+), 391 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 781eff613..74a672f94 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -6,10 +6,10 @@ from app.dependencies import get_db from app.keycloak_auth import get_current_username from app.models.authorization import RoleType, AuthorizationDB -from app.models.datasets import DatasetOut +from app.models.datasets import DatasetDB from app.models.files import FileOut from app.models.groups import GroupOut -from app.models.metadata import MetadataOut +from app.models.metadata import MetadataDB from app.models.pyobjectid import PyObjectId @@ -74,10 +74,8 @@ async def get_role_by_metadata( db: MongoClient = Depends(get_db), current_user=Depends(get_current_username), ) -> RoleType: - if ( - metadata := await db["metadata"].find_one({"_id": ObjectId(metadata_id)}) - ) is not None: - md_out = MetadataOut.from_mongo(metadata) + md_out = await MetadataDB.find_one(MetadataDB.id == ObjectId(metadata_id)) + if md_out: resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id if resource_type == "files": @@ -101,10 +99,10 @@ async def get_role_by_metadata( role = AuthorizationDB.from_mongo(authorization).role return role elif resource_type == "datasets": - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(resource_id)}) - ) is not None: - dataset_out = DatasetOut.from_mongo(dataset) + dataset_out = await DatasetDB.find_one( + DatasetDB.id == ObjectId(resource_id) + ) + if dataset_out: authorization = await db["authorization"].find_one( { "$and": [ @@ -259,10 +257,8 @@ async def __call__( db: MongoClient = Depends(get_db), current_user: str = Depends(get_current_username), ): - if ( - metadata := await db["metadata"].find_one({"_id": ObjectId(metadata_id)}) - ) is not None: - md_out = MetadataOut.from_mongo(metadata) + md_out = await MetadataDB.find_one(MetadataDB.id == ObjectId(metadata_id)) + if md_out: resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id if resource_type == "files": @@ 
-305,12 +301,10 @@ async def __call__( status_code=404, detail=f"Metadata {metadata_id} not found" ) elif resource_type == "datasets": - if ( - dataset := await db["datasets"].find_one( - {"_id": ObjectId(resource_id)} - ) - ) is not None: - dataset_out = DatasetOut.from_mongo(dataset) + dataset_out = await DatasetDB.find_one( + DatasetDB.id == ObjectId(resource_id) + ) + if dataset_out: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset_out.dataset_id, Or( diff --git a/backend/app/models/config.py b/backend/app/models/config.py index d6a105162..0d38ee0c7 100644 --- a/backend/app/models/config.py +++ b/backend/app/models/config.py @@ -1,3 +1,4 @@ +import pymongo from beanie import Document @@ -5,6 +6,14 @@ class ConfigEntryBase(Document): key: str value: str + class Settings: + name = "config" + indexes = [ + [ + ("key", pymongo.TEXT), + ], + ] + class ConfigEntryDB(ConfigEntryBase): pass diff --git a/backend/app/models/metadata.py b/backend/app/models/metadata.py index 8fa40474f..1f09633ab 100644 --- a/backend/app/models/metadata.py +++ b/backend/app/models/metadata.py @@ -1,30 +1,22 @@ import collections.abc -import traceback from datetime import datetime from typing import Optional, List, Union -from enum import Enum +from beanie import Document from elasticsearch import Elasticsearch -from bson import ObjectId -from bson.dbref import DBRef -from fastapi.param_functions import Depends -from pydantic import Field, validator, BaseModel, create_model, AnyUrl from fastapi import HTTPException -from pymongo import MongoClient +from pydantic import Field, validator, BaseModel, AnyUrl -from app import dependencies -from app.models.mongomodel import MongoModel, MongoDBRef -from app.models.pyobjectid import PyObjectId -from app.models.users import UserOut from app.models.listeners import ( EventListenerIn, LegacyEventListenerIn, EventListenerOut, ExtractorInfo, ) +from app.models.mongomodel import MongoDBRef +from app.models.users import UserOut from app.search.connect import update_record - # List of valid types that can be specified for metadata fields FIELD_TYPES = { "int": int, @@ -43,16 +35,16 @@ } # JSON schema can handle this for us? -class MetadataConfig(MongoModel): +class MetadataConfig(BaseModel): type: str = "str" # must be one of FIELD_TYPES -class MetadataEnumConfig(MongoModel): +class MetadataEnumConfig(BaseModel): type: str = "enum" options: List[str] # a list of options must be provided if type is enum -class MetadataField(MongoModel): +class MetadataField(BaseModel): name: str list: bool = False # whether a list[type] is acceptable widgetType: str = "TextField" # match material ui widget name? @@ -61,7 +53,7 @@ class MetadataField(MongoModel): required: bool = False # Whether the definition requires this field -class MetadataDefinitionBase(MongoModel): +class MetadataDefinitionBase(Document): """This describes a metadata object with a short name and description, predefined set of fields, and context. These provide a shorthand for use by listeners as well as a source for building GUI widgets to add new entries. @@ -103,14 +95,11 @@ class MetadataDefinitionBase(MongoModel): ] # https://json-ld.org/spec/latest/json-ld/#the-context context_url: Optional[str] # single URL applying to contents fields: List[MetadataField] - # TODO: Space-level requirements? + # TODO: Space-level requirements? 
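For orientation, a metadata definition ties a named set of typed fields to a JSON-LD context. An illustrative instance, sketched from the models above (the field names and the float type are assumptions, not taken from this patch):

    latlon = MetadataDefinitionIn(
        name="LatLon",
        description="A set of latitude/longitude coordinates",
        context=[{"latitude": "https://schema.org/latitude",
                  "longitude": "https://schema.org/longitude"}],
        fields=[
            MetadataField(name="latitude", config=MetadataConfig(type="float"), required=True),
            MetadataField(name="longitude", config=MetadataConfig(type="float"), required=True),
        ],
    )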
-class RequiredMetadata(MongoModel): - # TODO: Endpoints to get lists of what is required, and update these flags - definition_name: str - required_on_files: bool - required_on_datasets: bool + class Settings: + name = "metadata_definitions" class MetadataDefinitionIn(MetadataDefinitionBase): @@ -173,7 +162,7 @@ def validate_definition(content: dict, metadata_def: MetadataDefinitionOut): return content -class MetadataAgent(MongoModel): +class MetadataAgent(BaseModel): """Describes the user who created a piece of metadata. If listener is provided, user refers to the user who triggered the job.""" @@ -181,7 +170,7 @@ class MetadataAgent(MongoModel): listener: Optional[EventListenerOut] -class MetadataBase(MongoModel): +class MetadataBase(Document): context: Optional[ List[Union[dict, AnyUrl]] ] # https://json-ld.org/spec/latest/json-ld/#the-context @@ -208,6 +197,9 @@ def definition_is_valid(cls, v): raise ValueError("Problem with definition.") return v + class Settings: + name = "metadata" + class MetadataIn(MetadataBase): file_version: Optional[int] @@ -220,23 +212,7 @@ class MetadataPatch(MetadataIn): metadata_id: Optional[str] # specific metadata ID we are patching -# class MetadataRes(): -# pass -# -# -# class MetadataReqPatch(): -# pass -# -# -# class MetadataResPatch(MetadataRes): -# pass -# -# -# class MetadataResDelete(MetadataRes): -# pass - - -class MetadataDelete(MongoModel): +class MetadataDelete(BaseModel): metadata_id: Optional[str] # specific metadata ID we are deleting definition: Optional[str] listener: Optional[EventListenerIn] @@ -267,7 +243,6 @@ class MetadataOut(MetadataDB): async def validate_context( - db: MongoClient, content: dict, definition: Optional[str] = None, context_url: Optional[str] = None, @@ -288,10 +263,8 @@ async def validate_context( if context_url is not None: pass if definition is not None: - if ( - md_def := await db["metadata.definitions"].find_one({"name": definition}) - ) is not None: - md_def = MetadataDefinitionOut(**md_def) + md_def = MetadataDefinitionDB.find_one(MetadataDefinitionDB.name == definition) + if md_def: content = validate_definition(content, md_def) else: raise HTTPException( @@ -311,27 +284,19 @@ def deep_update(orig: dict, new: dict): return orig -async def patch_metadata( - metadata: dict, new_entries: dict, db: MongoClient, es: Elasticsearch -): +async def patch_metadata(metadata: MetadataDB, new_entries: dict, es: Elasticsearch): """Convenience function for updating original metadata contents with new entries.""" - try: - # TODO: For list-type definitions, should we append to list instead? - updated_content = deep_update(metadata["content"], new_entries) - updated_content = await validate_context( - db, - updated_content, - metadata.get("definition", None), - metadata.get("context_url", None), - metadata.get("context", []), - ) - metadata["content"] = updated_content - db["metadata"].replace_one( - {"_id": metadata["_id"]}, MetadataDB(**metadata).to_mongo() - ) - # Update entry to the metadata index - doc = {"doc": {"content": metadata["content"]}} - update_record(es, "metadata", doc, metadata["_id"]) - except Exception as e: - raise e - return MetadataOut.from_mongo(metadata) + # TODO: For list-type definitions, should we append to list instead? 
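For reference, deep_update is what gives PATCH its semantics here: new entries are merged into the existing content dict recursively instead of overwriting whole top-level keys. An illustrative run, assuming the usual recursive-merge behavior of this helper (values hypothetical):

    original = {"quality": {"score": 3, "reviewer": "alice"}, "stage": "raw"}
    deep_update(original, {"quality": {"score": 4}})
    # original is now {"quality": {"score": 4, "reviewer": "alice"}, "stage": "raw"}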
+ metadata.content = deep_update(metadata.content, new_entries) + metadata.content = await validate_context( + metadata.content, + metadata.definition, + metadata.context_url, + metadata.context, + ) + await metadata.save() + + # Update entry to the metadata index + doc = {"doc": {"content": metadata.content}} + update_record(es, "metadata", doc, metadata.id) + return metadata diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 5d7a6406e..1b934a3a8 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -42,6 +42,7 @@ ) from app.models.files import FileOut, FileDB from app.models.folders import FolderOut, FolderIn, FolderDB +from app.models.metadata import MetadataDB from app.models.pyobjectid import PyObjectId from app.models.users import UserOut from app.rabbitmq.listeners import submit_dataset_job @@ -384,57 +385,39 @@ async def edit_dataset( dataset_id: str, dataset_info: DatasetBase, db: MongoClient = Depends(dependencies.get_db), - user_id=Depends(get_user), + user=Depends(get_current_user), es=Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - # Check all connection and abort if any one of them is not available - if db is None or es is None: - raise HTTPException(status_code=503, detail="Service not available") - return - - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: # TODO: Refactor this with permissions checks etc. - ds = dict(dataset_info) if dataset_info is not None else {} - user = await db["users"].find_one({"email": user_id}) - ds["author"] = UserOut(**user) - ds["modified"] = datetime.datetime.utcnow() - try: - dataset.update(ds) - await db["datasets"].replace_one( - {"_id": ObjectId(dataset_id)}, DatasetDB(**dataset).to_mongo() - ) - # Update entry to the dataset index + dataset.update(dataset_info) + dataset.modified = datetime.datetime.utcnow() + await dataset.save() + + # Update entry to the dataset index + doc = { + "doc": { + "name": dataset.name, + "description": dataset.description, + "modified": dataset.modified, + } + } + update_record(es, "dataset", doc, dataset_id) + # updating metadata in elasticsearch + metadata = await MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(dataset_id) + ) + if metadata: doc = { "doc": { - "name": dataset["name"], - "description": dataset["description"], - "author": UserOut(**user).email, - "modified": dataset["modified"], + "name": dataset.name, + "description": dataset.description, } } - update_record(es, "dataset", doc, dataset_id) - # updating metadata in elasticsearch - if ( - metadata := await db["metadata"].find_one( - {"resource.resource_id": ObjectId(dataset_id)} - ) - ) is not None: - doc = { - "doc": { - "name": dataset["name"], - "description": dataset["description"], - "author": UserOut(**user).email, - } - } - update_record(es, "metadata", doc, str(metadata["_id"])) - except Exception as e: - raise HTTPException(status_code=500, detail=e.args[0]) - return DatasetOut.from_mongo(dataset) + update_record(es, "metadata", doc, str(metadata["_id"])) + return dataset raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index 95dc64a3f..732ceb5b0 100644 --- 
a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -1,81 +1,77 @@ -import io -from datetime import datetime from typing import Optional, List +from elasticsearch import Elasticsearch from fastapi import ( APIRouter, HTTPException, Depends, ) -from pymongo import MongoClient from app import dependencies -from app.deps.authorization_deps import Authorization, MetadataAuthorization -from app.keycloak_auth import get_user, get_current_user -from app.models.pyobjectid import PyObjectId +from app.deps.authorization_deps import MetadataAuthorization +from app.keycloak_auth import get_current_user from app.models.metadata import ( MetadataDefinitionIn, MetadataDefinitionDB, MetadataDefinitionOut, - MetadataIn, MetadataDB, MetadataOut, MetadataPatch, patch_metadata, ) +from app.models.pyobjectid import PyObjectId router = APIRouter() @router.post("/definition", response_model=MetadataDefinitionOut) async def save_metadata_definition( - definition_in: MetadataDefinitionIn, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), + definition_in: MetadataDefinitionIn, + user=Depends(get_current_user), ): - if ( - md_def := await db["metadata.definitions"].find_one( - {"name": definition_in.name} - ) - ) is not None: + existing = await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == definition_in.name + ) + if existing: raise HTTPException( status_code=409, detail=f"Metadata definition named {definition_in.name} already exists.", ) - - md_def = MetadataDefinitionDB(**definition_in.dict(), creator=user) - new_md_def = await db["metadata.definitions"].insert_one(md_def.to_mongo()) - found = await db["metadata.definitions"].find_one({"_id": new_md_def.inserted_id}) - md_def_out = MetadataDefinitionOut.from_mongo(found) - return md_def_out + else: + md_def = MetadataDefinitionDB(**definition_in.dict(), creator=user) + return await md_def.save() @router.get("/definition", response_model=List[MetadataDefinitionOut]) async def get_metadata_definition( - name: Optional[str] = None, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - skip: int = 0, - limit: int = 2, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 2, ): - definitions = [] if name is None: - root_query = db["metadata.definitions"].find() + return ( + await MetadataDefinitionDB.find() + .skip(skip) + .limit(limit) + .to_list(length=limit) + ) else: - root_query = db["metadata.definitions"].find({"name": name}) - - for doc in await root_query.skip(skip).limit(limit).to_list(length=limit): - definitions.append(MetadataDefinitionOut.from_mongo(doc)) - return definitions + return ( + await MetadataDefinitionDB.find(MetadataDefinitionDB.name == name) + .skip(skip) + .limit(limit) + .to_list(length=limit) + ) @router.patch("/{metadata_id}", response_model=MetadataOut) async def update_metadata( - metadata_in: MetadataPatch, - metadata_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_in: MetadataPatch, + metadata_id: str, + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -83,30 +79,25 @@ async def update_metadata( Returns: Metadata document that was updated """ - if ( - md := await db["metadata"].find_one({"_id": PyObjectId(metadata_id)}) - ) is not None: + md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) + if md: # TODO: Refactor this with permissions checks etc. - contents = metadata_in.contents - result = await patch_metadata(md, contents, db) - return result + return await patch_metadata(md, metadata_in.contents, es) else: raise HTTPException(status_code=404, detail=f"Metadata {metadata_id} not found") @router.delete("/{metadata_id}") async def delete_metadata( - metadata_id: str, - user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_id: str, + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Delete metadata by specific ID.""" - if ( - md := await db["metadata"].find_one({"_id": PyObjectId(metadata_id)}) - ) is not None: + md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) + if md: # TODO: Refactor this with permissions checks etc. - await db["metadata"].delete_one({"_id": PyObjectId(metadata_id)}) + await MetadataDB.delete(MetadataDB.id == PyObjectId(metadata_id)) return {"deleted": metadata_id} else: raise HTTPException(status_code=404, detail=f"Metadata {metadata_id} not found") diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index 4a7851477..09c8cebf1 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -5,17 +5,16 @@ from elasticsearch import Elasticsearch from fastapi import APIRouter, HTTPException, Depends from fastapi import Form -from pymongo import MongoClient from app import dependencies from app.deps.authorization_deps import Authorization from app.keycloak_auth import get_current_user, UserOut from app.models.datasets import DatasetOut, DatasetDB -from app.models.listeners import LegacyEventListenerIn +from app.models.listeners import LegacyEventListenerIn, EventListenerDB from app.models.metadata import ( MongoDBRef, MetadataAgent, - MetadataDefinitionOut, + MetadataDefinitionDB, MetadataIn, MetadataDB, MetadataOut, @@ -32,14 +31,12 @@ async def _build_metadata_db_obj( - db: MongoClient, metadata_in: MetadataIn, dataset: DatasetOut, user: UserOut, agent: MetadataAgent = None, ): content = await validate_context( - db, metadata_in.content, metadata_in.definition, metadata_in.context_url, @@ -50,12 +47,12 @@ async def _build_metadata_db_obj( # Build MetadataAgent depending on whether extractor info is present if metadata_in.extractor is not None: extractor_in = LegacyEventListenerIn(**metadata_in.extractor.dict()) - if ( - extractor := await db["listeners"].find_one( - {"_id": extractor_in.id, "version": extractor_in.version} - ) - ) is not None: - agent = MetadataAgent(creator=user, extractor=extractor) + listener = await EventListenerDB.find_one( + EventListenerDB.id == extractor_in.id, + EventListenerDB.version == extractor_in.version, + ) + if listener: + agent = MetadataAgent(creator=user, listener=listener) else: raise HTTPException(status_code=404, detail=f"Listener not found") else: @@ -78,7 +75,6 @@ async def add_dataset_metadata( metadata_in: MetadataIn, dataset_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = 
Depends(Authorization("uploader")), ): @@ -88,10 +84,8 @@ async def add_dataset_metadata( Returns: Metadata document that was added to database """ - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: - dataset = DatasetOut(**dataset) + dataset = DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: # If dataset already has metadata using this definition, don't allow duplication definition = metadata_in.definition if definition is not None: @@ -102,32 +96,31 @@ async def add_dataset_metadata( existing_q["agent.listener.version"] = metadata_in.extractor.version else: existing_q["agent.creator.id"] = user.id - if (existing := await db["metadata"].find_one(existing_q)) is not None: + existing = await MetadataDB.find_one(existing_q) + if existing: raise HTTPException( 409, f"Metadata for {definition} already exists on this dataset" ) - md = await _build_metadata_db_obj(db, metadata_in, dataset, user) - new_metadata = await db["metadata"].insert_one(md.to_mongo()) - found = await db["metadata"].find_one({"_id": new_metadata.inserted_id}) - metadata_out = MetadataOut.from_mongo(found) + md = await _build_metadata_db_obj(metadata_in, dataset, user) + await md.save() # Add an entry to the metadata index doc = { "resource_id": dataset_id, "resource_type": "dataset", - "created": metadata_out.created.utcnow(), + "created": md.created.utcnow(), "creator": user.email, - "content": metadata_out.content, - "context_url": metadata_out.context_url, - "context": metadata_out.context, + "content": md.content, + "context_url": md.context_url, + "context": md.context, "name": dataset.name, "resource_created": dataset.created, "author": dataset.author.email, "description": dataset.description, } - insert_record(es, "metadata", doc, metadata_out.id) - return metadata_out + insert_record(es, "metadata", doc, md.id) + return md @router.put("/{dataset_id}/metadata", response_model=MetadataOut) @@ -135,7 +128,6 @@ async def replace_dataset_metadata( metadata_in: MetadataIn, dataset_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): @@ -145,22 +137,18 @@ async def replace_dataset_metadata( Returns: Metadata document that was updated """ - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: query = {"resource.resource_id": ObjectId(dataset_id)} # Filter by MetadataAgent if metadata_in.extractor is not None: - if ( - extractor := await db["listeners"].find_one( - { - "name": metadata_in.extractor.name, - "version": metadata_in.extractor.version, - } - ) - ) is not None: - agent = MetadataAgent(creator=user, extractor=extractor) + listener = EventListenerDB.find_one( + EventListenerDB.name == metadata_in.extractor.name, + EventListenerDB.version == metadata_in.extractor.version, + ) + if listener: + agent = MetadataAgent(creator=user, listener=listener) # TODO: How do we handle two different users creating extractor metadata? 
Currently we ignore user query["agent.listener.name"] = agent.listener.name query["agent.listener.version"] = agent.listener.version @@ -170,18 +158,15 @@ async def replace_dataset_metadata( agent = MetadataAgent(creator=user) query["agent.creator.id"] = agent.creator.id - if (md := await db["metadata"].find_one(query)) is not None: + md = await MetadataDB.find_one(query) + if md: # Metadata exists, so prepare the new document we are going to replace it with - md_obj = _build_metadata_db_obj(db, metadata_in, dataset, user, agent=agent) - new_metadata = await db["metadata"].replace_one( - {"_id": md["_id"]}, md_obj.to_mongo() - ) - found = await db["metadata"].find_one({"_id": md["_id"]}) - metadata_out = MetadataOut.from_mongo(found) + md = _build_metadata_db_obj(metadata_in, dataset, user, agent=agent) + await md.save() # Update entry to the metadata index - doc = {"doc": {"content": metadata_out["content"]}} - update_record(es, "metadata", doc, metadata_out["_id"]) - return metadata_out + doc = {"doc": {"content": md.content}} + update_record(es, "metadata", doc, md.id) + return md else: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -191,7 +176,6 @@ async def update_dataset_metadata( metadata_in: MetadataPatch, dataset_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): @@ -201,25 +185,22 @@ async def update_dataset_metadata( Returns: Metadata document that was updated """ - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: query = {"resource.resource_id": ObjectId(dataset_id)} content = metadata_in.content if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context - if ( - existing_md := await db["metadata"].find_one( - {"_id": ObjectId(metadata_in.metadata_id)} - ) - ) is not None: + existing = await MetadataDB.find_one( + MetadataDB.id == ObjectId(metadata_in.metadata_id) + ) + if existing: content = await validate_context( - db, metadata_in.content, - existing_md.definition, - existing_md.context_url, - existing_md.context, + existing.definition, + existing.context_url, + existing.context, ) query["_id"] = metadata_in.metadata_id else: @@ -231,14 +212,11 @@ async def update_dataset_metadata( # Filter by MetadataAgent if metadata_in.extractor is not None: - if ( - listener := await db["listeners"].find_one( - { - "name": metadata_in.extractor.name, - "version": metadata_in.extractor.version, - } - ) - ) is not None: + listener = EventListenerDB.find_one( + EventListenerDB.name == metadata_in.extractor.name, + EventListenerDB.version == metadata_in.extractor.version, + ) + if listener: agent = MetadataAgent(creator=user, listener=listener) # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user query["agent.listener.name"] = agent.listener.name @@ -249,16 +227,15 @@ async def update_dataset_metadata( agent = MetadataAgent(creator=user) query["agent.creator.id"] = agent.creator.id - if (md := await db["metadata"].find_one(query)) is not None: + md = await MetadataDB.find_one(query) + if md: # TODO: Refactor this with permissions checks etc. 
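A recurring hazard in this migration: Beanie's find_one returns an awaitable query object, so a bare DatasetDB.find_one(...) without await (as in add_dataset_metadata and update_dataset_metadata above) is always truthy and the 404 branch can never fire. The guarded-lookup pattern, sketched:

    dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id))
    if dataset is None:
        raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")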
- result = await patch_metadata(md, content, db, es) - return result + return await patch_metadata(md, content, es) else: raise HTTPException( status_code=404, detail=f"Metadata matching the query not found" ) - else: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.get("/{dataset_id}/metadata", response_model=List[MetadataOut]) @@ -267,33 +244,26 @@ async def get_dataset_metadata( listener_name: Optional[str] = Form(None), listener_version: Optional[float] = Form(None), user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), allow: bool = Depends(Authorization("viewer")), ): - # if ( - # dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - # ) is not None: dataset = await DatasetDB.get(dataset_id) if dataset is not None: query = {"resource.resource_id": ObjectId(dataset_id)} - if listener_name is not None: query["agent.listener.name"] = listener_name if listener_version is not None: query["agent.listener.version"] = listener_version metadata = [] - async for md in db["metadata"].find(query): - md_out = MetadataOut.from_mongo(md) - if md_out.definition is not None: - if ( - md_def := await db["metadata.definitions"].find_one( - {"name": md_out.definition} - ) - ) is not None: - md_def = MetadataDefinitionOut(**md_def) - md_out.description = md_def.description - metadata.append(md_out) + for md in await MetadataDB.find(query): + # TODO: Can this be accomplished with a view? + if md.definition is not None: + md_def = MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) + if md_def: + md.description = md_def.description + metadata.append(md) return metadata else: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -304,23 +274,25 @@ async def delete_dataset_metadata( metadata_in: MetadataDelete, dataset_id: str, user=Depends(get_current_user), - db: MongoClient = Depends(dependencies.get_db), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = await DatasetDB.get(dataset_id) + if dataset: # filter by metadata_id or definition query = {"resource.resource_id": ObjectId(dataset_id)} if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry - if ( - existing_md := await db["metadata"].find_one( - {"metadata_id": ObjectId(metadata_in.metadata_id)} - ) - ) is not None: + existing = await MetadataDB.find_one( + MetadataDB.id == ObjectId(metadata_in.metadata_id) + ) + if existing: query["metadata_id"] = metadata_in.metadata_id + else: + raise HTTPException( + status_code=404, + detail=f"Metadata id {metadata_in.metadata_id} not found", + ) else: # Use provided definition name as filter # TODO: Should context_url also be unique to the file version? 
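The same caveat applies to the delete path below, where both the EventListenerDB.find_one and the final MetadataDB.find_one drop their awaits. A corrected sketch of the lookup-then-delete tail, using the {"deleted": ...} response shape the other delete endpoints in this series return:

    md = await MetadataDB.find_one(query)
    if md is None:
        raise HTTPException(status_code=404, detail="No metadata found with that criteria")
    await md.delete()
    return {"deleted": str(md.id)}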
@@ -328,16 +300,15 @@ async def delete_dataset_metadata( if definition is not None: query["definition"] = definition - # if extractor info is provided # Filter by MetadataAgent extractor_info = metadata_in.extractor_info if extractor_info is not None: - if ( - extractor := await db["listeners"].find_one( - {"name": extractor_info.name, "version": extractor_info.version} - ) - ) is not None: - agent = MetadataAgent(creator=user, extractor=extractor) + listener = EventListenerDB.find_one( + EventListenerDB.name == metadata_in.extractor.name, + EventListenerDB.version == metadata_in.extractor.version, + ) + if listener: + agent = MetadataAgent(creator=user, listener=listener) # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user query["agent.listener.name"] = agent.listener.name query["agent.listener.version"] = agent.listener.version @@ -350,10 +321,9 @@ async def delete_dataset_metadata( # delete from elasticsearch delete_document_by_id(es, "metadata", str(metadata_in.id)) - if (md := await db["metadata"].find_one(query)) is not None: - metadata_deleted = md - if await db["metadata"].delete_one({"_id": md["_id"]}) is not None: - return MetadataOut.from_mongo(metadata_deleted) + md = MetadataDB.find_one(query) + if md: + return await md.delete() else: raise HTTPException( status_code=404, detail=f"No metadata found with that criteria" diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index dfa990bb9..9c9c69f74 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -1,8 +1,7 @@ -import io -from datetime import datetime from typing import Optional, List -from elasticsearch import Elasticsearch + from bson import ObjectId +from elasticsearch import Elasticsearch from fastapi import ( APIRouter, HTTPException, @@ -13,8 +12,9 @@ from app import dependencies from app.deps.authorization_deps import FileAuthorization -from app.config import settings +from app.keycloak_auth import get_current_user, UserOut from app.models.files import FileOut +from app.models.listeners import EventListenerDB from app.models.metadata import ( MongoDBRef, MetadataAgent, @@ -23,12 +23,11 @@ MetadataDB, MetadataOut, MetadataPatch, - validate_definition, validate_context, patch_metadata, MetadataDelete, + MetadataDefinitionDB, ) -from app.keycloak_auth import get_user, get_current_user, get_token, UserOut from app.search.connect import insert_record, update_record, delete_document_by_id router = APIRouter() @@ -45,7 +44,6 @@ async def _build_metadata_db_obj( """Convenience function for building a MetadataDB object from incoming metadata plus a file. 
Agent and file version will be determined based on inputs if they are not provided directly.""" content = await validate_context( - db, metadata_in.content, metadata_in.definition, metadata_in.context_url, @@ -77,12 +75,12 @@ async def _build_metadata_db_obj( # Build MetadataAgent depending on whether extractor info is present/valid extractor_info = metadata_in.extractor_info if extractor_info is not None: - if ( - extractor := await db["listeners"].find_one( - {"name": extractor_info.name, "version": extractor_info.version} - ) - ) is not None: - agent = MetadataAgent(creator=user, listener=extractor) + listener = await EventListenerDB.find_one( + EventListenerDB.name == extractor_info.name, + EventListenerDB.version == extractor_info.version, + ) + if listener: + agent = MetadataAgent(creator=user, listener=listener) else: raise HTTPException(status_code=404, detail=f"Extractor not found") else: @@ -136,27 +134,26 @@ async def add_file_metadata( ] = metadata_in.extractor_info.version else: existing_q["agent.creator.id"] = user.id - if (existing := await db["metadata"].find_one(existing_q)) is not None: + existing = await MetadataDB.find_one(existing_q) + if existing: # Allow creating duplicate entry only if the file version is different - if existing["resource"]["version"] == metadata_in.file_version: + if existing.resource.version == metadata_in.file_version: raise HTTPException( 409, f"Metadata for {definition} already exists on this file" ) - md = await _build_metadata_db_obj(db, metadata_in, file, user) - new_metadata = await db["metadata"].insert_one(md.to_mongo()) - found = await db["metadata"].find_one({"_id": new_metadata.inserted_id}) - metadata_out = MetadataOut.from_mongo(found) + md = await _build_metadata_db_obj(metadata_in, file, user) + await md.save() # Add an entry to the metadata index doc = { "resource_id": file_id, "resource_type": "file", - "created": metadata_out.created.utcnow(), + "created": md.created.utcnow(), "creator": user.email, - "content": metadata_out.content, - "context_url": metadata_out.context_url, - "context": metadata_out.context, + "content": md.content, + "context_url": md.context_url, + "context": md.context, "name": file.name, "folder_id": str(file.folder_id), "dataset_id": str(file.dataset_id), @@ -165,8 +162,8 @@ async def add_file_metadata( "resource_creator": file.creator.email, "bytes": file.bytes, } - insert_record(es, "metadata", doc, metadata_out.id) - return metadata_out + insert_record(es, "metadata", doc, md.id) + return md @router.put("/{file_id}/metadata", response_model=MetadataOut) @@ -206,12 +203,12 @@ async def replace_file_metadata( # Filter by MetadataAgent extractor_info = metadata_in.extractor if extractor_info is not None: - if ( - extractor := await db["listeners"].find_one( - {"name": extractor_info.name, "version": extractor_info.version} - ) - ) is not None: - agent = MetadataAgent(creator=user, extractor=extractor) + listener = await EventListenerDB.find_one( + EventListenerDB.name == extractor_info.name, + EventListenerDB.version == extractor_info.version, + ) + if listener: + agent = MetadataAgent(creator=user, listener=listener) # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user... 
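The listener lookup above relies on Beanie's expression syntax, where multiple
positional conditions passed to find_one are implicitly AND-ed together. A
minimal sketch of the agent selection this enables (names follow the
surrounding patch):

    # Both conditions must match the same document (implicit AND).
    listener = await EventListenerDB.find_one(
        EventListenerDB.name == extractor_info.name,
        EventListenerDB.version == extractor_info.version,
    )
    # Fall back to a creator-only agent when no registered listener matches.
    agent = (
        MetadataAgent(creator=user, listener=listener)
        if listener is not None
        else MetadataAgent(creator=user)
    )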
query["agent.extractor.name"] = agent.extractor.name
                 query["agent.extractor.version"] = agent.extractor.version
@@ -221,21 +218,18 @@
             agent = MetadataAgent(creator=user)
             query["agent.creator.id"] = agent.creator.id
 
-        if (md := await db["metadata"].find_one(query)) is not None:
+        md = await MetadataDB.find_one(query)
+        if md:
             # Metadata exists, so prepare the new document we are going to replace it with
-            md_obj = await _build_metadata_db_obj(
-                db, metadata_in, file, user, agent=agent, version=target_version
-            )
-            new_metadata = await db["metadata"].replace_one(
-                {"_id": md["_id"]}, md_obj.to_mongo()
+            md = await _build_metadata_db_obj(
+                metadata_in, file, user, agent=agent, version=target_version
             )
-            found = await db["metadata"].find_one({"_id": md["_id"]})
-            metadata_out = MetadataOut.from_mongo(found)
+            await md.save()
 
             # Update entry to the metadata index
-            doc = {"doc": {"content": found["content"]}}
-            update_record(es, "metadata", doc, md["_id"])
-            return metadata_out
+            doc = {"doc": {"content": md.content}}
+            update_record(es, "metadata", doc, md.id)
+            return md
         else:
             raise HTTPException(status_code=404, detail=f"No metadata found to update")
     else:
@@ -259,16 +253,12 @@ async def update_file_metadata(
     """
 
     # check if metadata with file version exists, replace metadata if none exists
-    if (
-        version_md := await db["metadata"].find_one(
-            {
-                "resource.resource_id": ObjectId(file_id),
-                "resource.version": metadata_in.file_version,
-            }
-        )
-    ) is None:
-        result = await replace_file_metadata(metadata_in, file_id, user, db, es)
-        return result
+    version_md = await MetadataDB.find_one(
+        MetadataDB.resource.resource_id == ObjectId(file_id),
+        MetadataDB.resource.version == metadata_in.file_version,
+    )
+    if version_md is None:
+        return await replace_file_metadata(metadata_in, file_id, user, db, es)
 
     if (file := await db["files"].find_one({"_id": ObjectId(file_id)})) is not None:
         query = {"resource.resource_id": ObjectId(file_id)}
 
         if metadata_in.metadata_id is not None:
             # If a specific metadata_id is provided, validate the patch against existing context
-            if (
-                existing_md := await db["metadata"].find_one(
-                    {"_id": ObjectId(metadata_in.metadata_id)}
-                )
-            ) is not None:
+            existing_md = await MetadataDB.find_one(
+                MetadataDB.id == ObjectId(metadata_in.metadata_id)
+            )
+            if existing_md:
                 content = await validate_context(
-                    db,
                     metadata_in.content,
                     existing_md.definition,
                     existing_md.context_url,
@@ -316,12 +304,12 @@ async def update_file_metadata(
         # Filter by MetadataAgent
         extractor_info = metadata_in.extractor
         if extractor_info is not None:
-            if (
-                extractor := await db["listeners"].find_one(
-                    {"name": extractor_info.name, "version": extractor_info.version}
-                )
-            ) is not None:
-                agent = MetadataAgent(creator=user, extractor=extractor)
+            listener = await EventListenerDB.find_one(
+                EventListenerDB.name == extractor_info.name,
+                EventListenerDB.version == extractor_info.version,
+            )
+            if listener:
+                agent = MetadataAgent(creator=user, listener=listener)
                 # TODO: How do we handle two different users creating extractor metadata? Currently we ignore user
                 query["agent.extractor.name"] = agent.extractor.name
                 query["agent.extractor.version"] = agent.extractor.version
@@ -334,10 +322,10 @@ async def update_file_metadata(
             # query["agent.creator.id"] = agent.creator.id
             pass
 
-        if (md := await db["metadata"].find_one(query)) is not None:
+        md = await MetadataDB.find_one(query)
+        if md:
             # TODO: Refactor this with permissions checks etc.
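For context, patch_metadata merges the validated content into the stored
document and keeps the search index in sync. Roughly, and only as a sketch (the
real implementation lives in app.models.metadata and may differ):

    async def patch_metadata(md, content, es):
        # Merge incoming fields over the stored content, then persist.
        md.content = {**md.content, **content}
        await md.save()
        # Mirror the change into the Elasticsearch metadata index.
        update_record(es, "metadata", {"doc": {"content": md.content}}, str(md.id))
        return md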
- result = await patch_metadata(md, content, db, es) - return result + return await patch_metadata(md, content, es) else: raise HTTPException(status_code=404, detail=f"No metadata found to update") else: @@ -388,17 +376,14 @@ async def get_file_metadata( query["agent.extractor.version"] = extractor_version metadata = [] - async for md in db["metadata"].find(query): - md_out = MetadataOut.from_mongo(md) - if md_out.definition is not None: - if ( - md_def := await db["metadata.definitions"].find_one( - {"name": md_out.definition} - ) - ) is not None: - md_def = MetadataDefinitionOut(**md_def) - md_out.description = md_def.description - metadata.append(md_out) + for md in await MetadataDB.find(query): + if md.definition is not None: + md_def = MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) + if md_def: + md.description = md_def.description + metadata.append(md) return metadata else: raise HTTPException(status_code=404, detail=f"File {file_id} not found") @@ -437,12 +422,16 @@ async def delete_file_metadata( # filter by metadata_id or definition if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry - if ( - existing_md := await db["metadata"].find_one( - {"metadata_id": ObjectId(metadata_in.metadata_id)} - ) - ) is not None: + existing_md = MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) + if existing_md: query["metadata_id"] = metadata_in.metadata_id + else: + raise HTTPException( + status_code=404, + detail=f"Metadata id {metadata_in.metadata_id} not found", + ) else: # Use provided definition name as filter # TODO: Should context_url also be unique to the file version? @@ -454,12 +443,12 @@ async def delete_file_metadata( # Filter by MetadataAgent extractor_info = metadata_in.extractor if extractor_info is not None: - if ( - extractor := await db["listeners"].find_one( - {"name": extractor_info.name, "version": extractor_info.version} - ) - ) is not None: - agent = MetadataAgent(creator=user, extractor=extractor) + listener = await EventListenerDB.find_one( + EventListenerDB.name == extractor_info.name, + EventListenerDB.version == extractor_info.version, + ) + if listener: + agent = MetadataAgent(creator=user, listener=listener) # TODO: How do we handle two different users creating extractor metadata? 
Currently we ignore user query["agent.extractor.name"] = agent.extractor.name query["agent.extractor.version"] = agent.extractor.version @@ -472,13 +461,11 @@ async def delete_file_metadata( # delete from elasticsearch delete_document_by_id(es, "metadata", str(metadata_in.id)) - if (md := await db["metadata"].find_one(query)) is not None: - metadata_deleted = md - if await db["metadata"].delete_one({"_id": md["_id"]}) is not None: - return MetadataOut.from_mongo(metadata_deleted) + md = MetadataDB.find_one(query) + if md: + return await md.delete() else: raise HTTPException( status_code=404, detail=f"No metadata found with that criteria" ) - else: - raise HTTPException(status_code=404, detail=f"File {file_id} not found") + raise HTTPException(status_code=404, detail=f"File {file_id} not found") From 123ef91f7bc01aca81771489d20fda6cc0aaa733 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 11:12:44 -0500 Subject: [PATCH 22/32] Update datasets.py --- backend/app/routers/datasets.py | 288 ++++++++++++++------------------ 1 file changed, 123 insertions(+), 165 deletions(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 1b934a3a8..616425c03 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -425,58 +425,37 @@ async def edit_dataset( async def patch_dataset( dataset_id: str, dataset_info: DatasetPatch, - user_id=Depends(get_user), - db: MongoClient = Depends(dependencies.get_db), + user=Depends(get_current_user()), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - # Check all connection and abort if any one of them is not available - if db is None or es is None: - raise HTTPException(status_code=503, detail="Service not available") - return - - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: # TODO: Refactor this with permissions checks etc. 
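A conventional way to apply a PATCH body is to copy over only the fields the
client actually set, so omitted fields are left untouched; a sketch of that
pattern with Pydantic's exclude_unset (not verbatim from this patch):

    # Only fields explicitly present in the request body are applied.
    for field, value in dataset_info.dict(exclude_unset=True).items():
        setattr(dataset, field, value)
    dataset.modified = datetime.datetime.utcnow()
    await dataset.save()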
- ds = dict(dataset_info) if dataset_info is not None else {} - user = await db["users"].find_one({"email": user_id}) - ds["author"] = UserOut(**user) - ds["modified"] = datetime.datetime.utcnow() - try: - dataset.update((k, v) for k, v in ds.items() if v is not None) - await db["datasets"].replace_one( - {"_id": ObjectId(dataset_id)}, DatasetDB(**dataset).to_mongo() - ) - # Update entry to the dataset index + dataset.update(dataset_info) + dataset.modified = datetime.datetime.utcnow() + # Update entry to the dataset index + doc = { + "doc": { + "name": dataset.name, + "description": dataset.description, + "modified": dataset.modified, + } + } + update_record(es, "dataset", doc, dataset_id) + # updating metadata in elasticsearch + metadata = MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(dataset_id) + ) + if metadata: doc = { "doc": { - "name": dataset["name"], - "description": dataset["description"], - "author": UserOut(**user).email, - "modified": dataset["modified"], + "name": dataset.name, + "description": dataset.description, } } - update_record(es, "dataset", doc, dataset_id) - # updating metadata in elasticsearch - if ( - metadata := await db["metadata"].find_one( - {"resource.resource_id": ObjectId(dataset_id)} - ) - ) is not None: - doc = { - "doc": { - "name": dataset["name"], - "description": dataset["description"], - "author": UserOut(**user).email, - } - } - update_record(es, "metadata", doc, str(metadata["_id"])) - except Exception as e: - raise HTTPException(status_code=500, detail=e.args[0]) - return DatasetOut.from_mongo(dataset) + update_record(es, "metadata", doc, str(metadata["_id"])) + return dataset @router.delete("/{dataset_id}") @@ -487,14 +466,8 @@ async def delete_dataset( es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - # Check all connection and abort if any one of them is not available - if db is None or fs is None or es is None: - raise HTTPException(status_code=503, detail="Service not available") - return - - if (await db["datasets"].find_one({"_id": ObjectId(dataset_id)})) is not None: + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: # delete from elasticsearch delete_document_by_id(es, "dataset", dataset_id) query = {"match": {"resource_id": dataset_id}} @@ -508,8 +481,7 @@ async def delete_dataset( await remove_file_entry(file.id, db, fs, es) await db["folders"].delete_many({"dataset_id": ObjectId(dataset_id)}) return {"deleted": dataset_id} - else: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.post("/{dataset_id}/folders", response_model=FolderOut) @@ -520,23 +492,24 @@ async def add_folder( db: MongoClient = Depends(dependencies.get_db), allow: bool = Depends(Authorization("uploader")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - folder_dict = folder_in.dict() - folder_db = FolderDB( - **folder_in.dict(), author=user, dataset_id=PyObjectId(dataset_id) - ) - parent_folder = folder_in.parent_folder - if parent_folder is not None: - folder = await db["folders"].find_one({"_id": ObjectId(parent_folder)}) - if folder is None: - raise HTTPException( - status_code=400, detail=f"Parent folder {parent_folder} not found" - ) - new_folder = await 
db["folders"].insert_one(folder_db.to_mongo()) - found = await db["folders"].find_one({"_id": new_folder.inserted_id}) - folder_out = FolderOut.from_mongo(found) - return folder_out + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: + folder_dict = folder_in.dict() + folder_db = FolderDB( + **folder_in.dict(), author=user, dataset_id=PyObjectId(dataset_id) + ) + parent_folder = folder_in.parent_folder + if parent_folder is not None: + folder = await db["folders"].find_one({"_id": ObjectId(parent_folder)}) + if folder is None: + raise HTTPException( + status_code=400, detail=f"Parent folder {parent_folder} not found" + ) + new_folder = await db["folders"].insert_one(folder_db.to_mongo()) + found = await db["folders"].find_one({"_id": new_folder.inserted_id}) + folder_out = FolderOut.from_mongo(found) + return folder_out + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.get("/{dataset_id}/folders") @@ -547,33 +520,34 @@ async def get_dataset_folders( db: MongoClient = Depends(dependencies.get_db), allow: bool = Depends(Authorization("viewer")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - folders = [] - if parent_folder is None: - async for f in db["folders"].find( - {"dataset_id": ObjectId(dataset_id), "parent_folder": None} - ): - folders.append(FolderDB.from_mongo(f)) - else: - async for f in db["folders"].find( - { - "$and": [ - { - "dataset_id": ObjectId(dataset_id), - "parent_folder": ObjectId(parent_folder), - }, - { - "$or": [ - {"author.email": user_id}, - {"auth": {"$elemMatch": {"user_ids": user_id}}}, - ] - }, - ] - } - ): - folders.append(FolderDB.from_mongo(f)) - return folders + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: + folders = [] + if parent_folder is None: + async for f in db["folders"].find( + {"dataset_id": ObjectId(dataset_id), "parent_folder": None} + ): + folders.append(FolderDB.from_mongo(f)) + else: + async for f in db["folders"].find( + { + "$and": [ + { + "dataset_id": ObjectId(dataset_id), + "parent_folder": ObjectId(parent_folder), + }, + { + "$or": [ + {"author.email": user_id}, + {"auth": {"$elemMatch": {"user_ids": user_id}}}, + ] + }, + ] + } + ): + folders.append(FolderDB.from_mongo(f)) + return folders + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.delete("/{dataset_id}/folders/{folder_id}") @@ -585,51 +559,52 @@ async def delete_folder( es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - if (await db["folders"].find_one({"_id": ObjectId(folder_id)})) is not None: - # delete current folder and files - await remove_folder_entry(folder_id, db) - async for file in db["files"].find({"folder_id": ObjectId(folder_id)}): - file = FileOut(**file) - await remove_file_entry(file.id, db, fs, es) + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: + if (await db["folders"].find_one({"_id": ObjectId(folder_id)})) is not None: + # delete current folder and files + await remove_folder_entry(folder_id, db) + async for file in db["files"].find({"folder_id": ObjectId(folder_id)}): + file = FileOut(**file) + await remove_file_entry(file.id, db, fs, es) + + # list all child folders and delete child folders/files + parent_folder_id = folder_id + + async def 
_delete_nested_folders(parent_folder_id): + while ( + folders := await db["folders"].find_one( + { + "dataset_id": ObjectId(dataset_id), + "parent_folder": ObjectId(parent_folder_id), + } + ) + ) is not None: + async for folder in db["folders"].find( + { + "dataset_id": ObjectId(dataset_id), + "parent_folder": ObjectId(parent_folder_id), + } + ): + folder = FolderOut(**folder) + parent_folder_id = folder.id - # list all child folders and delete child folders/files - parent_folder_id = folder_id + # recursively delete child folder and files + await _delete_nested_folders(parent_folder_id) - async def _delete_nested_folders(parent_folder_id): - while ( - folders := await db["folders"].find_one( - { - "dataset_id": ObjectId(dataset_id), - "parent_folder": ObjectId(parent_folder_id), - } - ) - ) is not None: - async for folder in db["folders"].find( - { - "dataset_id": ObjectId(dataset_id), - "parent_folder": ObjectId(parent_folder_id), - } - ): - folder = FolderOut(**folder) - parent_folder_id = folder.id - - # recursively delete child folder and files - await _delete_nested_folders(parent_folder_id) - - await remove_folder_entry(folder.id, db) - async for file in db["files"].find( - {"folder_id": ObjectId(folder.id)} - ): - file = FileOut(**file) - await remove_file_entry(file.id, db, fs, es) + await remove_folder_entry(folder.id, db) + async for file in db["files"].find( + {"folder_id": ObjectId(folder.id)} + ): + file = FileOut(**file) + await remove_file_entry(file.id, db, fs, es) - await _delete_nested_folders(parent_folder_id) + await _delete_nested_folders(parent_folder_id) - return {"deleted": folder_id} - else: - raise HTTPException(status_code=404, detail=f"Folder {folder_id} not found") + return {"deleted": folder_id} + else: + raise HTTPException(status_code=404, detail=f"Folder {folder_id} not found") + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.post("/{dataset_id}/files", response_model=FileOut) @@ -645,11 +620,8 @@ async def save_file( credentials: HTTPAuthorizationCredentials = Security(security), allow: bool = Depends(Authorization("uploader")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: if user is None: raise HTTPException( status_code=401, detail=f"User not found. Session might have expired." @@ -682,8 +654,7 @@ async def save_file( ) return fileDB - else: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.post("/createFromZip", response_model=DatasetOut) @@ -696,11 +667,6 @@ async def create_dataset_from_zip( rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), token: str = Depends(get_token), ): - if user is None: - raise HTTPException( - status_code=401, detail=f"User not found. Session might have expired." 
- ) - if file.filename.endswith(".zip") == False: raise HTTPException(status_code=404, detail=f"File is not a zip file") @@ -776,11 +742,8 @@ async def download_dataset( fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(Authorization("viewer")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: dataset = DatasetOut(**dataset) current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") crate = ROCrate() @@ -927,8 +890,7 @@ async def download_dataset( media_type="application/x-zip-compressed", headers={"Content-Disposition": f'attachment;filename="{zip_name}"'}, ) - else: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") # submits file to extractor @@ -946,11 +908,8 @@ async def get_dataset_extract( rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), allow: bool = Depends(Authorization("uploader")), ): - if not allow: - raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - if ( - dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: dataset_out = DatasetOut.from_mongo(dataset) access_token = credentials.credentials req_headers = request.headers @@ -988,5 +947,4 @@ async def get_dataset_extract( ) return job_id - else: - raise HTTPException(status_code=404, detail=f"File {dataset_id} not found") + raise HTTPException(status_code=404, detail=f"File {dataset_id} not found") From f7e0046119e2a3261f1db4a1957666d11b1b7384 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 11:23:45 -0500 Subject: [PATCH 23/32] update metadata usage --- backend/app/routers/datasets.py | 56 ++++----------------------------- backend/app/routers/files.py | 12 +++---- 2 files changed, 12 insertions(+), 56 deletions(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index ea6b57b8c..4aa653cd8 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -255,22 +255,6 @@ async def get_datasets( skip=skip, limit=limit, ).to_list() - # for doc in ( - # await db["datasets_view"] - # .find( - # { - # "$and": [ - # {"author.email": user_id}, - # {"auth": {"$elemMatch": {"user_ids": user_id}}}, - # ] - # } - # ) - # .sort([("created", DESCENDING)]) - # .skip(skip) - # .limit(limit) - # .to_list(length=limit) - # ): - # datasets.append(DatasetOut.from_mongo(doc)) else: return await DatasetDBViewList.find( { @@ -283,23 +267,6 @@ async def get_datasets( skip=skip, limit=limit, ).to_list() - # for doc in ( - # await db["datasets_view"] - # .find( - # { - # "$or": [ - # {"author.email": user_id}, - # {"auth": {"$elemMatch": {"user_ids": user_id}}}, - # ] - # } - # ) - # .sort([("created", DESCENDING)]) - # .skip(skip) - # .limit(limit) - # .to_list(length=limit) - # ): - # datasets.append(DatasetOut.from_mongo(doc)) - # return datasets @router.get("/{dataset_id}", response_model=DatasetOut) @@ -309,13 +276,6 @@ async def get_dataset( allow: bool = Depends(Authorization("viewer")), ): return await DatasetDB.get(dataset_id) - # try: - # if ( - # dataset := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - # ) is not None: - # 
return DatasetOut.from_mongo(dataset)
-    # except:
-    #     raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")
 
 
@@ -768,11 +728,9 @@ async def download_dataset(
             f.write("Tag-File-Character-Encoding: UTF-8" + "\n")
 
             # Write dataset metadata if found
-            metadata = []
-            async for md in db["metadata"].find(
-                {"resource.resource_id": ObjectId(dataset_id)}
-            ):
-                metadata.append(md)
+            metadata = await MetadataDB.find(
+                MetadataDB.resource.resource_id == ObjectId(dataset_id)
+            ).to_list()
             if len(metadata) > 0:
                 datasetmetadata_path = os.path.join(
                     current_temp_dir, "_dataset_metadata.json"
                 )
@@ -818,11 +776,9 @@ async def download_dataset(
                 current_file_size = os.path.getsize(current_file_path)
                 bag_size += current_file_size
 
-                metadata = []
-                async for md in db["metadata"].find(
-                    {"resource.resource_id": ObjectId(file.id)}
-                ):
-                    metadata.append(md)
+                metadata = await MetadataDB.find(
+                    MetadataDB.resource.resource_id == ObjectId(file.id)
+                ).to_list()
                 if len(metadata) > 0:
                     metadata_filename = file_name + "_metadata.json"
                     metadata_filename_temp_path = os.path.join(
diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py
index c2bcd9a1c..6e01d1846 100644
--- a/backend/app/routers/files.py
+++ b/backend/app/routers/files.py
@@ -23,6 +23,7 @@
 from app.deps.authorization_deps import FileAuthorization
 from app.keycloak_auth import get_current_user, get_token
 from app.models.files import FileOut, FileVersion, FileContentType, FileDB
+from app.models.metadata import MetadataDB
 from app.models.users import UserOut
 from app.rabbitmq.listeners import submit_file_job, EventListenerJob
 from app.routers.feeds import check_feed_listeners
@@ -266,11 +267,10 @@ async def update_file(
             )
 
         # updating metadata in elasticsearch
-        if (
-            metadata := await db["metadata"].find_one(
-                {"resource.resource_id": ObjectId(updated_file.id)}
-            )
-        ) is not None:
+        metadata = await MetadataDB.find_one(
+            MetadataDB.resource.resource_id == ObjectId(updated_file.id)
+        )
+        if metadata:
             doc = {
                 "doc": {
                     "name": updated_file.name,
                     "bytes": updated_file.bytes,
                 }
             }
-            update_record(es, "metadata", doc, str(metadata["_id"]))
+            update_record(es, "metadata", doc, str(metadata.id))
         return updated_file
     else:
         raise HTTPException(status_code=404, detail=f"File {file_id} not found")

From 4ab69dbf5eaee08c0b1b6f27cd11a41022f9d926 Mon Sep 17 00:00:00 2001
From: Max Burnette
Date: Mon, 8 May 2023 11:24:32 -0500
Subject: [PATCH 24/32] formatting

---
 backend/app/routers/metadata.py | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py
index 732ceb5b0..8a32d4b83 100644
--- a/backend/app/routers/metadata.py
+++ b/backend/app/routers/metadata.py
@@ -26,8 +26,8 @@
 @router.post("/definition", response_model=MetadataDefinitionOut)
 async def save_metadata_definition(
-        definition_in: MetadataDefinitionIn,
-        user=Depends(get_current_user),
+    definition_in: MetadataDefinitionIn,
+    user=Depends(get_current_user),
 ):
     existing = await MetadataDefinitionDB.find_one(
         MetadataDefinitionDB.name == definition_in.name
     )
@@ -44,10 +44,10 @@
 @router.get("/definition", response_model=List[MetadataDefinitionOut])
 async def get_metadata_definition(
-        name: Optional[str] = None,
-        user=Depends(get_current_user),
-        skip: int = 0,
-        limit: int = 2,
+    name: Optional[str] = None,
+    user=Depends(get_current_user),
+    skip: 
int = 0, + limit: int = 2, ): if name is None: return ( @@ -67,11 +67,11 @@ async def get_metadata_definition( @router.patch("/{metadata_id}", response_model=MetadataOut) async def update_metadata( - metadata_in: MetadataPatch, - metadata_id: str, - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_in: MetadataPatch, + metadata_id: str, + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. @@ -89,9 +89,9 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( - metadata_id: str, - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_id: str, + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Delete metadata by specific ID.""" md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) From 80b8d07081591a871155b3dca83a82476621621d Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 11:30:03 -0500 Subject: [PATCH 25/32] replace db["datasets"] --- backend/app/routers/authorization.py | 38 +++++++++++++--------------- backend/app/routers/datasets.py | 35 ++++++++----------------- 2 files changed, 28 insertions(+), 45 deletions(-) diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index c42b88644..0cb32d869 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -23,7 +23,13 @@ AuthorizationOut, RoleType, ) -from app.models.datasets import DatasetOut, UserAndRole, GroupAndRole, DatasetRoles +from app.models.datasets import ( + DatasetOut, + UserAndRole, + GroupAndRole, + DatasetRoles, + DatasetDB, +) from app.models.groups import GroupOut from app.models.pyobjectid import PyObjectId from app.models.users import UserOut @@ -168,10 +174,8 @@ async def set_dataset_group_role( allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" - if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: - dataset = DatasetOut.from_mongo(dataset_q) + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: if ( group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: @@ -232,10 +236,8 @@ async def set_dataset_user_role( ): """Assign a single user a specific role for a dataset.""" - if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: - dataset = DatasetOut.from_mongo(dataset_q) + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: if (user_q := await db["users"].find_one({"email": username})) is not None: # First, remove any existing role the user has on the dataset await remove_dataset_user_role(dataset_id, username, db, user_id, allow) @@ -294,10 +296,8 @@ async def remove_dataset_group_role( ): """Remove any role the group has with a specific dataset.""" - if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: - dataset = DatasetOut.from_mongo(dataset_q) + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if 
dataset: if ( group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: @@ -345,10 +345,8 @@ async def remove_dataset_user_role( ): """Remove any role the user has with a specific dataset.""" - if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: - dataset = DatasetOut.from_mongo(dataset_q) + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: if (user_q := await db["users"].find_one({"email": username})) is not None: auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), @@ -380,10 +378,8 @@ async def get_dataset_roles( allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" - if ( - dataset_q := await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - ) is not None: - dataset = DatasetOut.from_mongo(dataset_q) + dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) + if dataset: roles = DatasetRoles(dataset_id=str(dataset.id)) async for auth_q in db["authorization"].find( diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 4aa653cd8..537c3aef3 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -201,25 +201,14 @@ async def save_dataset( raise HTTPException(status_code=503, detail="Service not available") return - dataset_out = await DatasetDB(**dataset_in.dict(), author=user).insert() - # dataset_db = DatasetDB(**dataset_in.dict(), author=user) - # new_dataset = await db["datasets"].insert_one(dataset_db.to_mongo()) - # found = await db["datasets"].find_one({"_id": new_dataset.inserted_id}) - # dataset_out = DatasetOut.from_mongo(found) + dataset_out = await DatasetDB(**dataset_in.dict(), author=user).save() # Create authorization entry await AuthorizationDB( dataset_id=dataset_out.id, role=RoleType.OWNER, creator=user.email, - ).insert() - # await db["authorization"].insert_one( - # AuthorizationDB( - # dataset_id=dataset_out.id, - # role=RoleType.OWNER, - # creator=user.email, - # ).to_mongo() - # ) + ).save() # Add en entry to the dataset index doc = { @@ -433,9 +422,10 @@ async def delete_dataset( query = {"match": {"resource_id": dataset_id}} delete_document_by_query(es, "metadata", query) # delete dataset first to minimize files/folder being uploaded to a delete dataset - - await db["datasets"].delete_one({"_id": ObjectId(dataset_id)}) - await db.metadata.delete_many({"resource.resource_id": ObjectId(dataset_id)}) + await dataset.delete() + await MetadataDB.delete_all( + MetadataDB.resource.resource_id == ObjectId(dataset_id) + ) async for file in db["files"].find({"dataset_id": ObjectId(dataset_id)}): file = FileOut(**file) await remove_file_entry(file.id, db, fs, es) @@ -643,12 +633,11 @@ async def create_dataset_from_zip( dataset_description = "Uploaded as %s" % file.filename ds_dict = {"name": dataset_name, "description": dataset_description} dataset_db = DatasetDB(**ds_dict, author=user) - new_dataset = await db["datasets"].insert_one(dataset_db.to_mongo()) - dataset_id = new_dataset.inserted_id + dataset_db.save() # Create folders folder_lookup = await _create_folder_structure( - dataset_id, zip_directory, "", {}, user, db + dataset_db.id, zip_directory, "", {}, user, db ) # Go back through zipfile, this time uploading files to folders @@ -668,12 +657,12 @@ async def create_dataset_from_zip( fileDB = FileDB( name=filename, creator=user, - dataset_id=dataset_id, + 
dataset_id=dataset_db.id, folder_id=folder_id, ) else: fileDB = FileDB( - name=filename, creator=user, dataset_id=dataset_id + name=filename, creator=user, dataset_id=dataset_db.id ) with open(extracted, "rb") as file_reader: await add_file_entry( @@ -689,9 +678,7 @@ async def create_dataset_from_zip( if os.path.isfile(extracted): os.remove(extracted) - found = await db["datasets"].find_one({"_id": new_dataset.inserted_id}) - dataset_out = DatasetOut.from_mongo(found) - return dataset_out + return dataset_db @router.get("/{dataset_id}/download", response_model=DatasetOut) From 61356147caf81730ca15b9a462ada30dc4503c40 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 8 May 2023 14:26:33 -0500 Subject: [PATCH 26/32] Update metadata.py --- backend/app/models/metadata.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/app/models/metadata.py b/backend/app/models/metadata.py index 1f09633ab..2311a67ce 100644 --- a/backend/app/models/metadata.py +++ b/backend/app/models/metadata.py @@ -53,7 +53,7 @@ class MetadataField(BaseModel): required: bool = False # Whether the definition requires this field -class MetadataDefinitionBase(Document): +class MetadataDefinitionBase(BaseModel): """This describes a metadata object with a short name and description, predefined set of fields, and context. These provide a shorthand for use by listeners as well as a source for building GUI widgets to add new entries. @@ -106,7 +106,7 @@ class MetadataDefinitionIn(MetadataDefinitionBase): pass -class MetadataDefinitionDB(MetadataDefinitionBase): +class MetadataDefinitionDB(Document, MetadataDefinitionBase): creator: UserOut @@ -170,7 +170,7 @@ class MetadataAgent(BaseModel): listener: Optional[EventListenerOut] -class MetadataBase(Document): +class MetadataBase(BaseModel): context: Optional[ List[Union[dict, AnyUrl]] ] # https://json-ld.org/spec/latest/json-ld/#the-context @@ -220,7 +220,7 @@ class MetadataDelete(BaseModel): extractor_info: Optional[ExtractorInfo] -class MetadataDB(MetadataBase): +class MetadataDB(Document, MetadataBase): resource: MongoDBRef agent: MetadataAgent created: datetime = Field(default_factory=datetime.utcnow) From 4a0f7e9f51ed727b62e9d70eb3e075cff4d6321c Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Tue, 9 May 2023 08:55:15 -0500 Subject: [PATCH 27/32] ObjectID & response cleanups --- backend/app/deps/authorization_deps.py | 114 +++-------------------- backend/app/models/datasets.py | 1 + backend/app/models/feeds.py | 6 +- backend/app/models/listeners.py | 2 + backend/app/routers/datasets.py | 70 ++++++-------- backend/app/routers/feeds.py | 97 +++++++++---------- backend/app/routers/files.py | 2 +- backend/app/routers/listeners.py | 45 +++++---- backend/app/routers/metadata_datasets.py | 27 +++--- 9 files changed, 130 insertions(+), 234 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 931cb0314..3e4b2b34c 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -15,7 +15,6 @@ async def get_role( dataset_id: str, - db: MongoClient = Depends(get_db), current_user=Depends(get_current_username), ) -> RoleType: """Returns the role a specific user has on a dataset. 
If the user is a creator (owner), they are not listed in @@ -28,16 +27,6 @@ async def get_role( ), ) return authorization.role - # authorization = await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(dataset_id)}, - # {"$or": [{"creator": current_user}, {"user_ids": current_user}]}, - # ] - # } - # ) - # role = AuthorizationDB.from_mongo(authorization).role - # return role async def get_role_by_file( @@ -55,27 +44,19 @@ async def get_role_by_file( ), ) return authorization.role - # authorization = await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(file_out.dataset_id)}, - # {"$or": [{"creator": current_user}, {"user_ids": current_user}]}, - # ] - # } - # ) - # role = AuthorizationDB.from_mongo(authorization).role - # return role - raise HTTPException(status_code=404, detail=f"File {file_id} not found") +class PydanticObjectID: + pass + + async def get_role_by_metadata( metadata_id: str, db: MongoClient = Depends(get_db), current_user=Depends(get_current_username), ) -> RoleType: - md_out = await MetadataDB.find_one(MetadataDB.id == ObjectId(metadata_id)) - if md_out: + if (md_out := await MetadataDB.get(PydanticObjectID(metadata_id))) is not None: resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id if resource_type == "files": @@ -92,12 +73,12 @@ async def get_role_by_metadata( ) return authorization.role elif resource_type == "datasets": - dataset_out = await DatasetDB.find_one( - DatasetDB.id == ObjectId(resource_id) - ) - if dataset_out: + + if ( + dataset := await DatasetDB.get(PydanticObjectID(resource_id)) + ) is not None: authorization = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == dataset_out.dataset_id, + AuthorizationDB.dataset_id == dataset.id, Or( AuthorizationDB.creator == current_user, AuthorizationDB.user_ids == current_user, @@ -150,22 +131,6 @@ async def __call__( AuthorizationDB.user_ids == current_user, ), ) - # if ( - # authorization_q := await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(dataset_id)}, - # { - # "$or": [ - # {"creator": current_user}, - # {"user_ids": current_user}, - # ] - # }, - # ] - # } - # ) - # ) is not None: - # authorization = AuthorizationDB.from_mongo(authorization_q) if authorization is not None: if access(authorization.role, self.role): return True @@ -203,22 +168,6 @@ async def __call__( AuthorizationDB.user_ids == current_user, ), ) - # if ( - # authorization_q := await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(file_out.dataset_id)}, - # { - # "$or": [ - # {"creator": current_user}, - # {"user_ids": current_user}, - # ] - # }, - # ] - # } - # ) - # ) is not None: - # authorization = AuthorizationDB.from_mongo(authorization_q) if authorization is not None: if access(authorization.role, self.role): return True @@ -243,8 +192,7 @@ async def __call__( db: MongoClient = Depends(get_db), current_user: str = Depends(get_current_username), ): - md_out = await MetadataDB.find_one(MetadataDB.id == ObjectId(metadata_id)) - if md_out: + if (md_out := await MetadataDB.get(PydanticObjectID(metadata_id))) is not None: resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id if resource_type == "files": @@ -259,22 +207,6 @@ async def __call__( AuthorizationDB.user_ids == current_user, ), ) - # if ( - # authorization_q := await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(file_out.dataset_id)}, - # { - # "$or": [ - 
# {"creator": current_user}, - # {"user_ids": current_user}, - # ] - # }, - # ] - # } - # ) - # ) is not None: - # authorization = AuthorizationDB.from_mongo(authorization_q) if authorization is not None: if access(authorization.role, self.role): return True @@ -287,10 +219,10 @@ async def __call__( status_code=404, detail=f"Metadata {metadata_id} not found" ) elif resource_type == "datasets": - dataset_out = await DatasetDB.find_one( - DatasetDB.id == ObjectId(resource_id) - ) - if dataset_out: + if ( + dataset_out := await DatasetDB.get(PydanticObjectID(resource_id)) + is not None + ): authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset_out.dataset_id, Or( @@ -298,22 +230,6 @@ async def __call__( AuthorizationDB.user_ids == current_user, ), ) - # if ( - # authorization_q := await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(dataset_out.id)}, - # { - # "$or": [ - # {"creator": current_user}, - # {"user_ids": current_user}, - # ] - # }, - # ] - # } - # ) - # ) is not None: - # authorization = AuthorizationDB.from_mongo(authorization_q) if authorization is not None: if access(authorization.role, self.role): return True diff --git a/backend/app/models/datasets.py b/backend/app/models/datasets.py index 27f8ea832..c577a206c 100644 --- a/backend/app/models/datasets.py +++ b/backend/app/models/datasets.py @@ -39,6 +39,7 @@ class DatasetPatch(BaseModel): class DatasetDB(Document, DatasetBase): + id: PydanticObjectId = Field(None, alias="_id") author: UserOut created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) diff --git a/backend/app/models/feeds.py b/backend/app/models/feeds.py index 7e28b4d40..28faab823 100644 --- a/backend/app/models/feeds.py +++ b/backend/app/models/feeds.py @@ -1,8 +1,8 @@ from typing import List import pymongo -from beanie import Document -from pydantic import BaseModel +from beanie import Document, PydanticObjectId +from pydantic import BaseModel, Field from app.models.authorization import Provenance from app.models.listeners import FeedListener @@ -27,6 +27,8 @@ class FeedIn(JobFeed): class FeedDB(Document, JobFeed, Provenance): + id: PydanticObjectId = Field(None, alias="_id") + class Settings: name = "feeds" indexes = [ diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 1e98bb10c..1297e2632 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -113,6 +113,7 @@ class EventListenerJobStatus(str, Enum): class EventListenerJob(Document): """This summarizes a submission to an extractor. 
All messages from that extraction should include this job's ID.""" + id: PydanticObjectId = Field(None, alias="_id") listener_id: str resource_ref: MongoDBRef creator: UserOut @@ -172,6 +173,7 @@ class EventListenerDatasetJobMessage(BaseModel): class EventListenerJobUpdate(Document): """This is a status update message coming from the extractors back to Clowder.""" + id: PydanticObjectId = Field(None, alias="_id") job_id: str timestamp: datetime = Field(default_factory=datetime.utcnow) status: str diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 093233cf2..e48063db9 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -8,6 +8,7 @@ from collections.abc import Mapping, Iterable from typing import List, Optional, Union +from beanie import PydanticObjectId from bson import ObjectId from bson import json_util from elasticsearch import Elasticsearch @@ -196,11 +197,6 @@ async def save_dataset( db: MongoClient = Depends(dependencies.get_db), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): - # Check all connection and abort if any one of them is not available - if db is None or es is None: - raise HTTPException(status_code=503, detail="Service not available") - return - dataset_out = await DatasetDB(**dataset_in.dict(), author=user).save() # Create authorization entry @@ -232,6 +228,7 @@ async def get_datasets( limit: int = 10, mine: bool = False, ): + # TODO: Other endpoints convert DB response to DatasetOut(**response.dict()) if mine: return await DatasetDBViewList.find( { @@ -261,10 +258,11 @@ async def get_datasets( @router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( dataset_id: str, - # db: MongoClient = Depends(dependencies.get_db), allow: bool = Depends(Authorization("viewer")), ): - return await DatasetDB.get(dataset_id) + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: + return DatasetOut(**dataset.dict()) + raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @router.get("/{dataset_id}/files", response_model=List[FileOut]) @@ -338,8 +336,7 @@ async def edit_dataset( es=Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. 
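The walrus-plus-404 idiom around DatasetDB.get recurs throughout this commit;
it could be centralized in a small helper, sketched below (get_dataset_or_404
is a hypothetical name, not part of the patch):

    async def get_dataset_or_404(dataset_id: str) -> DatasetDB:
        # Document.get fetches by primary key and returns None on a miss.
        dataset = await DatasetDB.get(PydanticObjectId(dataset_id))
        if dataset is None:
            raise HTTPException(
                status_code=404, detail=f"Dataset {dataset_id} not found"
            )
        return dataset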
dataset.update(dataset_info) dataset.modified = datetime.datetime.utcnow() @@ -355,18 +352,19 @@ async def edit_dataset( } update_record(es, "dataset", doc, dataset_id) # updating metadata in elasticsearch - metadata = await MetadataDB.find_one( - MetadataDB.resource.resource_id == ObjectId(dataset_id) - ) - if metadata: + if ( + metadata := await MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(dataset_id) + ) + ) is not None: doc = { "doc": { "name": dataset.name, "description": dataset.description, } } - update_record(es, "metadata", doc, str(metadata["_id"])) - return dataset + update_record(es, "metadata", doc, str(metadata.id)) + return DatasetOut(**dataset.dict()) raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") @@ -378,8 +376,7 @@ async def patch_dataset( es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. dataset.update(dataset_info) dataset.modified = datetime.datetime.utcnow() @@ -404,7 +401,7 @@ async def patch_dataset( } } update_record(es, "metadata", doc, str(metadata["_id"])) - return dataset + return DatasetOut(**dataset.dict()) @router.delete("/{dataset_id}") @@ -415,8 +412,7 @@ async def delete_dataset( es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # delete from elasticsearch delete_document_by_id(es, "dataset", dataset_id) query = {"match": {"resource_id": dataset_id}} @@ -442,8 +438,7 @@ async def add_folder( db: MongoClient = Depends(dependencies.get_db), allow: bool = Depends(Authorization("uploader")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: folder_dict = folder_in.dict() folder_db = FolderDB( **folder_in.dict(), author=user, dataset_id=PyObjectId(dataset_id) @@ -470,8 +465,7 @@ async def get_dataset_folders( db: MongoClient = Depends(dependencies.get_db), allow: bool = Depends(Authorization("viewer")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: folders = [] if parent_folder is None: async for f in db["folders"].find( @@ -509,8 +503,7 @@ async def delete_folder( es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await db["folders"].find_one({"_id": ObjectId(folder_id)})) is not None: # delete current folder and files await remove_folder_entry(folder_id, db) @@ -579,8 +572,7 @@ async def save_file( credentials: HTTPAuthorizationCredentials = Security(security), allow: bool = Depends(Authorization("uploader")), ): - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: raise 
HTTPException(
                 status_code=401, detail=f"User not found. Session might have expired."
             )
@@ -641,12 +633,12 @@ async def create_dataset_from_zip(
     dataset_name = file.filename.rstrip(".zip")
     dataset_description = "Uploaded as %s" % file.filename
     ds_dict = {"name": dataset_name, "description": dataset_description}
-    dataset_db = DatasetDB(**ds_dict, author=user)
-    dataset_db.save()
+    dataset = DatasetDB(**ds_dict, author=user)
+    await dataset.save()
 
     # Create folders
     folder_lookup = await _create_folder_structure(
-        dataset_db.id, zip_directory, "", {}, user, db
+        dataset.id, zip_directory, "", {}, user, db
     )
 
     # Go back through zipfile, this time uploading files to folders
@@ -666,12 +658,12 @@ async def create_dataset_from_zip(
                     fileDB = FileDB(
                         name=filename,
                         creator=user,
-                        dataset_id=dataset_db.id,
+                        dataset_id=dataset.id,
                         folder_id=folder_id,
                     )
                 else:
                     fileDB = FileDB(
-                        name=filename, creator=user, dataset_id=dataset_db.id
+                        name=filename, creator=user, dataset_id=dataset.id
                     )
                 with open(extracted, "rb") as file_reader:
                     await add_file_entry(
@@ -687,7 +679,7 @@ async def create_dataset_from_zip(
             if os.path.isfile(extracted):
                 os.remove(extracted)
 
-    return dataset_db
+    return DatasetOut(**dataset.dict())
 
 
 @router.get("/{dataset_id}/download", response_model=DatasetOut)
@@ -698,8 +690,7 @@ async def download_dataset(
     fs: Minio = Depends(dependencies.get_fs),
     allow: bool = Depends(Authorization("viewer")),
 ):
-    dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id))
-    if dataset:
-        dataset = DatasetOut(**dataset)
+    if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None:
+        dataset = DatasetOut(**dataset.dict())
         current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload")
         crate = ROCrate()
@@ -856,13 +847,10 @@ async def get_dataset_extract(
     parameters: dict = None,
     user=Depends(get_current_user),
     credentials: HTTPAuthorizationCredentials = Security(security),
-    db: MongoClient = Depends(dependencies.get_db),
     rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq),
     allow: bool = Depends(Authorization("uploader")),
 ):
-    dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id))
-    if dataset:
-        dataset_out = DatasetOut.from_mongo(dataset)
+    if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None:
         access_token = credentials.credentials
         req_headers = request.headers
         raw = req_headers.raw
@@ -890,7 +878,7 @@ async def get_dataset_extract(
 
         job_id = await submit_dataset_job(
-            dataset_out,
+            DatasetOut(**dataset.dict()),
             current_routing_key,
             parameters,
             user,
diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py
index 0e4034888..e0a51fc9d 100644
--- a/backend/app/routers/feeds.py
+++ b/backend/app/routers/feeds.py
@@ -1,12 +1,10 @@
 from typing import List, Optional
 
-from bson import ObjectId
+from beanie import PydanticObjectId
+from beanie.operators import NE
 from fastapi import APIRouter, HTTPException, Depends
 from pika.adapters.blocking_connection import BlockingChannel
-from pymongo import MongoClient
-from beanie.operators import NE
 
-from app.dependencies import get_db
 from app.keycloak_auth import get_current_user, get_current_username
 from app.models.feeds import (
     FeedIn,
     FeedDB,
     FeedOut,
 )
 from app.models.listeners import (
     FeedListener,
     EventListenerDB,
-    EventListenerOut,
 )
 from app.models.users import UserOut
 from app.rabbitmq.listeners import submit_file_job
@@ -32,8 +29,7 @@ async def disassociate_listener_db(feed_id: str, listener_id: str):
     This actually performs the database operations, and can be 
used by any endpoints that need this functionality. """ - feed = FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) - if feed: + if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: new_listeners = [] for feed_listener in feed.listeners: if feed_listener.listener_id != listener_id: @@ -43,15 +39,14 @@ async def disassociate_listener_db(feed_id: str, listener_id: str): async def check_feed_listeners( - es_client, - file_out: FileOut, - user: UserOut, - db: MongoClient, - rabbitmq_client: BlockingChannel, - token: str, + es_client, + file_out: FileOut, + user: UserOut, + rabbitmq_client: BlockingChannel, + token: str, ): """Automatically submit new file to listeners on feeds that fit the search criteria.""" - listeners_found = [] + listener_ids_found = [] feeds = await FeedDB.find(NE(FeedDB.listeners, [])) for feed in feeds: # Only proceed if feed actually has auto-triggering listeners @@ -61,12 +56,11 @@ async def check_feed_listeners( if feed_match: for listener in feed.listeners: if listener.automatic: - listeners_found.append(listener.listener_id) - for targ_listener in listeners_found: - listener_info = EventListenerDB.find( - EventListenerDB.id == ObjectId(targ_listener) - ) - if listener_info: + listener_ids_found.append(listener.listener_id) + for targ_listener in listener_ids_found: + if ( + listener_info := await EventListenerDB.get(PydanticObjectId(targ_listener)) + ) is not None: await submit_file_job( file_out, listener_info.name, # routing_key @@ -75,25 +69,26 @@ async def check_feed_listeners( rabbitmq_client, token, ) - return listeners_found + return listener_ids_found @router.post("", response_model=FeedOut) async def save_feed( - feed_in: FeedIn, - user=Depends(get_current_username), + feed_in: FeedIn, + user=Depends(get_current_username), ): """Create a new Feed (i.e. 
saved search) in the database.""" feed = FeedDB(**feed_in.dict(), creator=user) - return await feed.save() + await feed.save() + return FeedOut(**feed.dict()) @router.get("", response_model=List[FeedOut]) async def get_feeds( - name: Optional[str] = None, - user=Depends(get_current_user), - skip: int = 0, - limit: int = 10, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 10, ): """Fetch all existing Feeds.""" if name is not None: @@ -116,35 +111,33 @@ async def get_feeds( @router.get("/{feed_id}", response_model=FeedOut) async def get_feed( - feed_id: str, - user=Depends(get_current_user), + feed_id: str, + user=Depends(get_current_user), ): """Fetch an existing saved search Feed.""" - feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) - if feed: - return feed + if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: + return FeedOut(**feed.dict()) else: raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") @router.delete("/{feed_id}") async def delete_feed( - feed_id: str, - user=Depends(get_current_user), + feed_id: str, + user=Depends(get_current_user), ): """Delete an existing saved search Feed.""" - feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) - if feed: - await FeedDB.delete(FeedDB.id == ObjectId(feed_id)) + if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: + await feed.delete() return {"deleted": feed_id} raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") @router.post("/{feed_id}/listeners", response_model=FeedOut) async def associate_listener( - feed_id: str, - listener: FeedListener, - user=Depends(get_current_user), + feed_id: str, + listener: FeedListener, + user=Depends(get_current_user), ): """Associate an existing Event Listener with a Feed, e.g. so it will be triggered on new Feed results. @@ -152,14 +145,13 @@ async def associate_listener( feed_id: Feed that should have new Event Listener associated listener: JSON object with "listener_id" field and "automatic" bool field (whether to auto-trigger on new data) """ - feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) - if feed: - exists = await EventListenerDB.find_one( - EventListenerDB.id == ObjectId(listener.listener_id) - ) - if exists: + if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: + if ( + exists := await EventListenerDB.get(PydanticObjectId(listener.listener_id)) + ) is not None: feed.listeners.append(listener) - return await feed.save() + await feed.save() + return FeedOut(**feed.dict()) raise HTTPException( status_code=404, detail=f"listener {listener.listener_id} not found" ) @@ -168,9 +160,9 @@ async def associate_listener( @router.delete("/{feed_id}/listeners/{listener_id}", response_model=FeedOut) async def disassociate_listener( - feed_id: str, - listener_id: str, - user=Depends(get_current_user), + feed_id: str, + listener_id: str, + user=Depends(get_current_user), ): """Disassociate an Event Listener from a Feed. 
@@ -178,8 +170,7 @@ async def disassociate_listener( feed_id: UUID of search Feed that is being changed listener_id: UUID of Event Listener that should be disassociated """ - feed = await FeedDB.find_one(FeedDB.id == ObjectId(feed_id)) - if feed: + if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: await disassociate_listener_db(feed_id, listener_id) return {"disassociated": listener_id} raise HTTPException(status_code=404, detail=f"feed {feed_id} not found") diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 6e01d1846..b535d9f65 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -159,7 +159,7 @@ async def add_file_entry( insert_record(es, "file", doc, file_db.id) # Submit file job to any qualifying feeds - await check_feed_listeners(es, file_out, user, db, rabbitmq_client, token) + await check_feed_listeners(es, file_out, user, rabbitmq_client, token) # TODO: Move this to MongoDB middle layer diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py index 5337d535e..8c02d30eb 100644 --- a/backend/app/routers/listeners.py +++ b/backend/app/routers/listeners.py @@ -5,6 +5,7 @@ import string from typing import List, Optional +from beanie import PydanticObjectId from bson import ObjectId from fastapi import APIRouter, HTTPException, Depends from packaging import version @@ -35,6 +36,7 @@ async def _process_incoming_v1_extractor_info( extractor_id: str, process: dict, ): + """Return FeedDB object given v1 extractor info.""" if "file" in process: # Create a MIME-based feed for this v1 extractor criteria_list = [] @@ -98,7 +100,7 @@ async def save_listener( listener = EventListenerDB(**listener_in.dict(), creator=user) # TODO: Check for duplicates somehow? await listener.save() - return listener + return EventListenerOut(**listener.dict()) @legacy_router.post("", response_model=EventListenerOut) @@ -123,11 +125,10 @@ async def save_legacy_listener( if version.parse(listener.version) > version.parse(existing.version): await listener.save() # TODO: Should older extractor version entries be deleted? - # await EventListenerDB.delete(EventListenerDB.id == existing.id) - return listener + return EventListenerOut(**listener.dict()) else: # TODO: Should this fail the POST instead? 
- return existing + return EventListenerOut(**existing.dict()) else: # Register new listener await listener.save() @@ -136,7 +137,7 @@ async def save_legacy_listener( await _process_incoming_v1_extractor_info( legacy_in.name, listener.id, listener.properties.process ) - return listener + return EventListenerOut(**listener.dict()) @router.get("/search", response_model=List[EventListenerOut]) @@ -177,9 +178,10 @@ async def list_default_labels(user=Depends(get_current_username)): @router.get("/{listener_id}", response_model=EventListenerOut) async def get_listener(listener_id: str, user=Depends(get_current_username)): """Return JSON information about an Event Listener if it exists.""" - listener = EventListenerDB.find_one(EventListenerDB.id == ObjectId(listener_id)) - if listener: - return listener + if ( + listener := EventListenerDB.find_one(PydanticObjectId(listener_id)) + ) is not None: + return EventListenerOut(**listener.dict()) raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") @@ -199,22 +201,17 @@ async def get_listeners( category -- filter by category has to be exact match label -- filter by label has to be exact match """ - if category and label: - query = { - "$and": [ - {"properties.categories": category}, - {"properties.defaultLabels": label}, - ] - } - elif category: - query = {"properties.categories": category} - elif label: - query = {"properties.defaultLabels": label} - else: - query = {} + query = [] + if category: + query.append(EventListenerDB.properties.categories == category) + if label: + query.append(EventListenerDB.properties.default_labels == label) return ( - await EventListenerDB.find(query).skip(skip).limit(limit).to_list(length=limit) + await EventListenerDB.find(**query) + .skip(skip) + .limit(limit) + .to_list(length=limit) ) @@ -222,7 +219,6 @@ async def get_listeners( async def edit_listener( listener_id: str, listener_in: EventListenerIn, - db: MongoClient = Depends(get_db), user_id=Depends(get_user), ): """Update the information about an existing Event Listener.. 
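The category/label rewrite above builds a list of Beanie query expressions, but find(**query) dict-unpacks a list and raises a TypeError at call time; [PATCH 32/32] below corrects it to positional unpacking. A short sketch of the working form, with a hypothetical Listener document standing in for EventListenerDB:

    from typing import List, Optional

    from beanie import Document


    class Listener(Document):
        category: Optional[str] = None
        label: Optional[str] = None


    async def find_listeners(
        category: Optional[str] = None,
        label: Optional[str] = None,
        skip: int = 0,
        limit: int = 10,
    ) -> List[Listener]:
        query = []
        if category:
            query.append(Listener.category == category)
        if label:
            query.append(Listener.label == label)
        # An empty *query matches everything, so no if/else ladder over
        # the filter combinations is needed.
        return await Listener.find(*query).skip(skip).limit(limit).to_list()
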
@@ -238,7 +234,8 @@ async def edit_listener( listener_update["modified"] = datetime.datetime.utcnow() try: listener.update(listener_update) - return await EventListenerDB(**listener).save() + await listener.save() + return EventListenerOut(**listener.dict()) except Exception as e: raise HTTPException(status_code=500, detail=e.args[0]) raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index faf0b3e86..27c3b718f 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -39,6 +39,7 @@ async def _build_metadata_db_obj( user: UserOut, agent: MetadataAgent = None, ): + """Convenience function for converting MetadataIn to MetadataDB object.""" content = await validate_context( metadata_in.content, metadata_in.definition, @@ -87,8 +88,8 @@ async def add_dataset_metadata( Returns: Metadata document that was added to database """ - dataset = DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # If dataset already has metadata using this definition, don't allow duplication definition = metadata_in.definition query = [] @@ -128,6 +129,7 @@ async def add_dataset_metadata( "author": dataset.author.email, "description": dataset.description, } + insert_record(es, "metadata", doc, md.id) return MetadataOut(**md.dict()) @@ -146,8 +148,7 @@ async def replace_dataset_metadata( Returns: Metadata document that was updated """ - dataset = DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(dataset_id)] # Filter by MetadataAgent @@ -203,17 +204,17 @@ async def update_dataset_metadata( Returns: Metadata document that was updated """ - dataset = DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = {"resource.resource_id": ObjectId(dataset_id)} content = metadata_in.content if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context - existing = await MetadataDB.find_one( - MetadataDB.id == ObjectId(metadata_in.metadata_id) - ) - if existing: + if ( + existing := await MetadataDB.get( + PydanticObjectId(metadata_in.metadata_id) + ) + ) is not None: content = await validate_context( metadata_in.content, existing.definition, @@ -266,8 +267,7 @@ async def get_dataset_metadata( user=Depends(get_current_user), allow: bool = Depends(Authorization("viewer")), ): - dataset = await DatasetDB.get(PydanticObjectId(dataset_id)) - if dataset is not None: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(dataset_id)] if listener_name is not None: @@ -299,8 +299,7 @@ async def delete_dataset_metadata( es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): - dataset = await DatasetDB.get(PydanticObjectId(dataset_id)) - if dataset is not None: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # filter by metadata_id or definition query = [MetadataDB.resource.resource_id == ObjectId(dataset_id)] if metadata_in.metadata_id is not None: From 
20fc6e0fc6ffa6206dc7af5d64bfa74eeb4a82eb Mon Sep 17 00:00:00 2001
From: Max Burnette
Date: Tue, 9 May 2023 08:57:03 -0500
Subject: [PATCH 28/32] syntax fix

---
 backend/app/deps/authorization_deps.py | 16 ++++++----------
 backend/app/models/pyobjectid.py       | 28 --------------------------
 2 files changed, 6 insertions(+), 38 deletions(-)

diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py
index 3e4b2b34c..482a10d99 100644
--- a/backend/app/deps/authorization_deps.py
+++ b/backend/app/deps/authorization_deps.py
@@ -1,3 +1,4 @@
+from beanie import PydanticObjectId
 from beanie.operators import Or
 from bson import ObjectId
 from fastapi import Depends, HTTPException
@@ -47,16 +48,12 @@ async def get_role_by_file(
     raise HTTPException(status_code=404, detail=f"File {file_id} not found")


-class PydanticObjectID:
-    pass
-
-
 async def get_role_by_metadata(
     metadata_id: str,
     db: MongoClient = Depends(get_db),
     current_user=Depends(get_current_username),
 ) -> RoleType:
-    if (md_out := await MetadataDB.get(PydanticObjectID(metadata_id))) is not None:
+    if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None:
         resource_type = md_out.resource.collection
         resource_id = md_out.resource.resource_id
         if resource_type == "files":
@@ -75,7 +72,7 @@ async def get_role_by_metadata(

     elif resource_type == "datasets":
         if (
-            dataset := await DatasetDB.get(PydanticObjectID(resource_id))
+            dataset := await DatasetDB.get(PydanticObjectId(resource_id))
         ) is not None:
             authorization = await AuthorizationDB.find_one(
                 AuthorizationDB.dataset_id == dataset.id,
@@ -192,7 +189,7 @@ async def __call__(
         db: MongoClient = Depends(get_db),
         current_user: str = Depends(get_current_username),
     ):
-        if (md_out := await MetadataDB.get(PydanticObjectID(metadata_id))) is not None:
+        if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None:
             resource_type = md_out.resource.collection
             resource_id = md_out.resource.resource_id
             if resource_type == "files":
@@ -220,8 +217,7 @@ async def __call__(
             )
         elif resource_type == "datasets":
             if (
-                dataset_out := await DatasetDB.get(PydanticObjectID(resource_id))
-                is not None
-            ):
+                dataset_out := await DatasetDB.get(PydanticObjectId(resource_id))
+            ) is not None:
                 authorization = await AuthorizationDB.find_one(
diff --git a/backend/app/models/pyobjectid.py b/backend/app/models/pyobjectid.py
index b17f35353..893a44454 100644
--- a/backend/app/models/pyobjectid.py
+++ b/backend/app/models/pyobjectid.py
@@ -32,31 +32,3 @@ def validate(cls, v):
         except InvalidId:
             raise ValueError("Not a valid ObjectId")
         return str(v)
-
-
-#
-# class PydanticObjectId(ObjectId):
-#     """
-#     Object Id field. Compatible with Pydantic.
-# """ -# -# @classmethod -# def __get_validators__(cls): -# yield cls.validate -# -# @classmethod -# def validate(cls, v): -# if isinstance(v, bytes): -# v = v.decode("utf-8") -# try: -# return PydanticObjectId(v) -# except InvalidId: -# raise TypeError("Id must be of type PydanticObjectId") -# -# @classmethod -# def __modify_schema__(cls, field_schema): -# field_schema.update( -# type="string", -# examples=["5eb7cf5a86d9755df3a6c593", "5eb7cfb05e32e07750a1756a"], -# ) -# From 5572ee0276f164c667cbb2dc9c6a701e25acef76 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Tue, 9 May 2023 09:00:35 -0500 Subject: [PATCH 29/32] author -> creator --- backend/app/beanie_views_test.py | 2 +- backend/app/models/datasets.py | 2 +- backend/app/models/folders.py | 3 +- backend/app/routers/datasets.py | 10 +- backend/app/routers/metadata_datasets.py | 2 +- frontend/src/components/Explore.tsx | 169 +++++++++++------- frontend/src/components/files/FilesTable.tsx | 2 +- .../src/components/search/SearchResult.tsx | 13 +- 8 files changed, 121 insertions(+), 82 deletions(-) diff --git a/backend/app/beanie_views_test.py b/backend/app/beanie_views_test.py index e394163e3..dc3e9677f 100644 --- a/backend/app/beanie_views_test.py +++ b/backend/app/beanie_views_test.py @@ -85,7 +85,7 @@ async def example(): # print(results) results = await DatasetDBViewList.find( - DatasetDBViewList.author.email == "lmarini@illinois.edu" + DatasetDBViewList.creator.email == "lmarini@illinois.edu" ).to_list() print(results) diff --git a/backend/app/models/datasets.py b/backend/app/models/datasets.py index c577a206c..7c8cf4a02 100644 --- a/backend/app/models/datasets.py +++ b/backend/app/models/datasets.py @@ -40,7 +40,7 @@ class DatasetPatch(BaseModel): class DatasetDB(Document, DatasetBase): id: PydanticObjectId = Field(None, alias="_id") - author: UserOut + creator: UserOut created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) status: str = DatasetStatus.PRIVATE.name diff --git a/backend/app/models/folders.py b/backend/app/models/folders.py index 9230e4d42..07f916dde 100644 --- a/backend/app/models/folders.py +++ b/backend/app/models/folders.py @@ -1,5 +1,6 @@ from datetime import datetime from typing import Optional + from pydantic import Field from app.models.mongomodel import MongoModel @@ -18,7 +19,7 @@ class FolderIn(FolderBase): class FolderDB(FolderBase): dataset_id: PyObjectId parent_folder: Optional[PyObjectId] - author: UserOut + creator: UserOut created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index e48063db9..37d197463 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -153,7 +153,7 @@ async def _create_folder_structure( "name": k, "parent_folder": parent_folder_id, } - folder_db = FolderDB(**folder_dict, author=user) + folder_db = FolderDB(**folder_dict, creator=user) new_folder = await db["folders"].insert_one(folder_db.to_mongo()) new_folder_id = new_folder.inserted_id @@ -197,7 +197,7 @@ async def save_dataset( db: MongoClient = Depends(dependencies.get_db), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): - dataset_out = await DatasetDB(**dataset_in.dict(), author=user).save() + dataset_out = await DatasetDB(**dataset_in.dict(), creator=user).save() # Create authorization entry await AuthorizationDB( @@ -210,7 +210,7 @@ async def save_dataset( doc = { 
"name": dataset_out.name, "description": dataset_out.description, - "author": dataset_out.author.email, + "author": dataset_out.creator.email, "created": dataset_out.created, "modified": dataset_out.modified, "download": dataset_out.downloads, @@ -441,7 +441,7 @@ async def add_folder( if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: folder_dict = folder_in.dict() folder_db = FolderDB( - **folder_in.dict(), author=user, dataset_id=PyObjectId(dataset_id) + **folder_in.dict(), creator=user, dataset_id=PyObjectId(dataset_id) ) parent_folder = folder_in.parent_folder if parent_folder is not None: @@ -633,7 +633,7 @@ async def create_dataset_from_zip( dataset_name = file.filename.rstrip(".zip") dataset_description = "Uploaded as %s" % file.filename ds_dict = {"name": dataset_name, "description": dataset_description} - dataset = DatasetDB(**ds_dict, author=user) + dataset = DatasetDB(**ds_dict, creator=user) dataset.save() # Create folders diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index 27c3b718f..5ba6a89fd 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -126,7 +126,7 @@ async def add_dataset_metadata( "context": md.context, "name": dataset.name, "resource_created": dataset.created, - "author": dataset.author.email, + "author": dataset.creator.email, "description": dataset.description, } diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index 600a887d3..b4e66787c 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -1,19 +1,19 @@ -import React, {useEffect, useState} from "react"; -import {Box, Button, ButtonGroup, Grid, Tab, Tabs} from "@mui/material"; +import React, { useEffect, useState } from "react"; +import { Box, Button, ButtonGroup, Grid, Tab, Tabs } from "@mui/material"; -import {Dataset, RootState} from "../types/data"; -import {useDispatch, useSelector} from "react-redux"; -import {fetchDatasets} from "../actions/dataset"; -import {resetFailedReason} from "../actions/common"; -import {downloadThumbnail} from "../utils/thumbnail"; +import { Dataset, RootState } from "../types/data"; +import { useDispatch, useSelector } from "react-redux"; +import { fetchDatasets } from "../actions/dataset"; +import { resetFailedReason } from "../actions/common"; +import { downloadThumbnail } from "../utils/thumbnail"; -import {a11yProps, TabPanel} from "./tabs/TabComponent"; -import {ActionModal} from "./dialog/ActionModal"; +import { a11yProps, TabPanel } from "./tabs/TabComponent"; +import { ActionModal } from "./dialog/ActionModal"; import DatasetCard from "./datasets/DatasetCard"; import config from "../app.config"; -import {ArrowBack, ArrowForward} from "@material-ui/icons"; +import { ArrowBack, ArrowForward } from "@material-ui/icons"; import Layout from "./Layout"; -import {Listeners} from "./listeners/Listeners"; +import { Listeners } from "./listeners/Listeners"; const tab = { fontStyle: "normal", @@ -23,11 +23,13 @@ const tab = { }; export const Explore = (): JSX.Element => { - - // Redux connect equivalent const dispatch = useDispatch(); - const listDatasets = (skip: number | undefined, limit: number | undefined, mine: boolean | undefined) => dispatch(fetchDatasets(skip, limit, mine)); + const listDatasets = ( + skip: number | undefined, + limit: number | undefined, + mine: boolean | undefined + ) => dispatch(fetchDatasets(skip, limit, mine)); const dismissError = () => 
dispatch(resetFailedReason()); const datasets = useSelector((state: RootState) => state.dataset.datasets); const reason = useSelector((state: RootState) => state.error.reason); @@ -36,10 +38,10 @@ export const Explore = (): JSX.Element => { const [datasetThumbnailList, setDatasetThumbnailList] = useState([]); // TODO add option to determine limit number; default show 5 datasets each time const [currPageNum, setCurrPageNum] = useState(0); - const [limit,] = useState(20); + const [limit] = useState(20); const [skip, setSkip] = useState(); // TODO add switch to turn on and off "mine" dataset - const [mine,] = useState(false); + const [mine] = useState(false); const [prevDisabled, setPrevDisabled] = useState(true); const [nextDisabled, setNextDisabled] = useState(false); const [selectedTabIndex, setSelectedTabIndex] = useState(0); @@ -56,15 +58,17 @@ export const Explore = (): JSX.Element => { if (reason !== "" && reason !== null && reason !== undefined) { setErrorOpen(true); } - }, [reason]) + }, [reason]); const handleErrorCancel = () => { // reset error message and close the error window dismissError(); setErrorOpen(false); - } + }; const handleErrorReport = () => { window.open( - `${config.GHIssueBaseURL}+${encodeURIComponent(reason)}&body=${encodeURIComponent(stack)}` + `${config.GHIssueBaseURL}+${encodeURIComponent( + reason + )}&body=${encodeURIComponent(stack)}` ); }; @@ -72,16 +76,25 @@ export const Explore = (): JSX.Element => { useEffect(() => { (async () => { if (datasets !== undefined && datasets.length > 0) { - // TODO change the type any to something else const datasetThumbnailListTemp: any = []; - await Promise.all(datasets.map(async (dataset) => { - // add thumbnails - if (dataset["thumbnail"] !== null && dataset["thumbnail"] !== undefined) { - const thumbnailURL = await downloadThumbnail(dataset["thumbnail"]); - datasetThumbnailListTemp.push({"id": dataset["id"], "thumbnail": thumbnailURL}); - } - })); + await Promise.all( + datasets.map(async (dataset) => { + // add thumbnails + if ( + dataset["thumbnail"] !== null && + dataset["thumbnail"] !== undefined + ) { + const thumbnailURL = await downloadThumbnail( + dataset["thumbnail"] + ); + datasetThumbnailListTemp.push({ + id: dataset["id"], + thumbnail: thumbnailURL, + }); + } + }) + ); setDatasetThumbnailList(datasetThumbnailListTemp); } })(); @@ -89,11 +102,13 @@ export const Explore = (): JSX.Element => { // disable flipping if reaches the last page if (datasets.length < limit) setNextDisabled(true); else setNextDisabled(false); - }, [datasets]); // switch tabs - const handleTabChange = (_event: React.ChangeEvent<{}>, newTabIndex: number) => { + const handleTabChange = ( + _event: React.ChangeEvent<{}>, + newTabIndex: number + ) => { setSelectedTabIndex(newTabIndex); }; @@ -122,59 +137,87 @@ export const Explore = (): JSX.Element => {
	[JSX markup lost in extraction: this hunk reflowed the component's return block through prettier without changing behavior, covering the error ActionModal, the dataset/extractor tab panels with the DatasetCard grid rendered from datasets.map, and the Prev/Next pagination ButtonGroup; only the closing lines survive below.]
- ) -} + ); +}; diff --git a/frontend/src/components/files/FilesTable.tsx b/frontend/src/components/files/FilesTable.tsx index b85a94e97..1ddf51492 100644 --- a/frontend/src/components/files/FilesTable.tsx +++ b/frontend/src/components/files/FilesTable.tsx @@ -65,7 +65,7 @@ export default function FilesTable(props: FilesTableProps) {
-							by {folder.author.first_name} {folder.author.last_name}
+							by {folder.creator.first_name} {folder.creator.last_name}
    diff --git a/frontend/src/components/search/SearchResult.tsx b/frontend/src/components/search/SearchResult.tsx index 0a3b4319d..443829fe4 100644 --- a/frontend/src/components/search/SearchResult.tsx +++ b/frontend/src/components/search/SearchResult.tsx @@ -26,13 +26,13 @@ export function SearchResult(props) { const {data} = props; return ( - + {data.map((item) => ( - { item._index === "dataset" ? : } + {item._index === "dataset" ? : } - + { item._index === "dataset" ? } - { - item._index === "dataset" ? - `Created by ${parseString(item.author)} at ${parseDate(item.created)}` - : - `Created by ${parseString(item.creator)} at ${parseDate(item.created)}` - } + `Created by ${parseString(item.creator)} at ${parseDate(item.created)}` {item._index === "dataset" ? parseString(item.description) : `${item.content_type} | ${item.bytes} bytes`} From 8bf469baf9569cf7bf288bc31c79d8a153743a9d Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Tue, 9 May 2023 09:01:36 -0500 Subject: [PATCH 30/32] formatting --- backend/app/routers/feeds.py | 46 ++++++++++++++++++------------------ 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py index e0a51fc9d..0d806c977 100644 --- a/backend/app/routers/feeds.py +++ b/backend/app/routers/feeds.py @@ -39,11 +39,11 @@ async def disassociate_listener_db(feed_id: str, listener_id: str): async def check_feed_listeners( - es_client, - file_out: FileOut, - user: UserOut, - rabbitmq_client: BlockingChannel, - token: str, + es_client, + file_out: FileOut, + user: UserOut, + rabbitmq_client: BlockingChannel, + token: str, ): """Automatically submit new file to listeners on feeds that fit the search criteria.""" listener_ids_found = [] @@ -59,7 +59,7 @@ async def check_feed_listeners( listener_ids_found.append(listener.listener_id) for targ_listener in listener_ids_found: if ( - listener_info := await EventListenerDB.get(PydanticObjectId(targ_listener)) + listener_info := await EventListenerDB.get(PydanticObjectId(targ_listener)) ) is not None: await submit_file_job( file_out, @@ -74,8 +74,8 @@ async def check_feed_listeners( @router.post("", response_model=FeedOut) async def save_feed( - feed_in: FeedIn, - user=Depends(get_current_username), + feed_in: FeedIn, + user=Depends(get_current_username), ): """Create a new Feed (i.e. 
saved search) in the database.""" feed = FeedDB(**feed_in.dict(), creator=user) @@ -85,10 +85,10 @@ async def save_feed( @router.get("", response_model=List[FeedOut]) async def get_feeds( - name: Optional[str] = None, - user=Depends(get_current_user), - skip: int = 0, - limit: int = 10, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 10, ): """Fetch all existing Feeds.""" if name is not None: @@ -111,8 +111,8 @@ async def get_feeds( @router.get("/{feed_id}", response_model=FeedOut) async def get_feed( - feed_id: str, - user=Depends(get_current_user), + feed_id: str, + user=Depends(get_current_user), ): """Fetch an existing saved search Feed.""" if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: @@ -123,8 +123,8 @@ async def get_feed( @router.delete("/{feed_id}") async def delete_feed( - feed_id: str, - user=Depends(get_current_user), + feed_id: str, + user=Depends(get_current_user), ): """Delete an existing saved search Feed.""" if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: @@ -135,9 +135,9 @@ async def delete_feed( @router.post("/{feed_id}/listeners", response_model=FeedOut) async def associate_listener( - feed_id: str, - listener: FeedListener, - user=Depends(get_current_user), + feed_id: str, + listener: FeedListener, + user=Depends(get_current_user), ): """Associate an existing Event Listener with a Feed, e.g. so it will be triggered on new Feed results. @@ -147,7 +147,7 @@ async def associate_listener( """ if (feed := await FeedDB.get(PydanticObjectId(feed_id))) is not None: if ( - exists := await EventListenerDB.get(PydanticObjectId(listener.listener_id)) + exists := await EventListenerDB.get(PydanticObjectId(listener.listener_id)) ) is not None: feed.listeners.append(listener) await feed.save() @@ -160,9 +160,9 @@ async def associate_listener( @router.delete("/{feed_id}/listeners/{listener_id}", response_model=FeedOut) async def disassociate_listener( - feed_id: str, - listener_id: str, - user=Depends(get_current_user), + feed_id: str, + listener_id: str, + user=Depends(get_current_user), ): """Disassociate an Event Listener from a Feed. 
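[PATCH 31/32] below standardizes lookups on awaited Beanie calls inside walrus expressions, as in its validate_context fix. A sketch of why the await matters, using an assumed Definition document rather than the real MetadataDefinitionDB:

    from beanie import Document
    from fastapi import HTTPException


    class Definition(Document):
        name: str


    async def require_definition(name: str) -> Definition:
        # Without await, find_one returns a FindOne query object that is
        # always truthy, so a bare `if md_def:` check would never reach
        # the 404 branch; awaiting it yields the document or None.
        if (
            md_def := await Definition.find_one(Definition.name == name)
        ) is not None:
            return md_def
        raise HTTPException(status_code=404, detail=f"Definition {name} not found")
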
From c400442753f69ea4f24ac2a8912e27513c21a1dd Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Tue, 9 May 2023 09:11:00 -0500 Subject: [PATCH 31/32] consistent syntax --- backend/app/models/metadata.py | 9 +- backend/app/routers/authorization.py | 138 +++++++++------------------ 2 files changed, 49 insertions(+), 98 deletions(-) diff --git a/backend/app/models/metadata.py b/backend/app/models/metadata.py index fe88aec64..d64516c2b 100644 --- a/backend/app/models/metadata.py +++ b/backend/app/models/metadata.py @@ -5,7 +5,7 @@ from beanie import Document, PydanticObjectId from elasticsearch import Elasticsearch from fastapi import HTTPException -from pydantic import Field, validator, AnyUrl +from pydantic import Field, validator, AnyUrl, BaseModel from app.models.listeners import ( EventListenerIn, @@ -272,8 +272,11 @@ async def validate_context( if context_url is not None: pass if definition is not None: - md_def = MetadataDefinitionDB.find_one(MetadataDefinitionDB.name == definition) - if md_def: + if ( + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == definition + ) + ) is not None: content = validate_definition(content, md_def) else: raise HTTPException( diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 0cb32d869..cbcd4908f 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -1,5 +1,6 @@ from typing import List +from beanie import PydanticObjectId from beanie.operators import Or from bson import ObjectId from fastapi import APIRouter, Depends @@ -24,7 +25,6 @@ RoleType, ) from app.models.datasets import ( - DatasetOut, UserAndRole, GroupAndRole, DatasetRoles, @@ -66,16 +66,10 @@ async def save_authorization( ) authorization_dict = authorization_in.dict() - authorization_dict["user_ids"] = user_ids authorization_db = await AuthorizationDB( - **authorization_dict, creator=user + **authorization_dict, creator=user, user_ids=user_ids ).insert() return authorization_db - # new_authorization = await db["authorization"].insert_one( - # authorization_db.to_mongo() - # ) - # found = await db["authorization"].find_one({"_id": new_authorization.inserted_id}) - # return AuthorizationDB.from_mongo(found) @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationDB) @@ -86,31 +80,21 @@ async def get_dataset_role( ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization - authorization = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - Or( - AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user, - ), - ) - # if ( - # authorization_q := await db["authorization"].find_one( - # { - # "$and": [ - # {"dataset_id": ObjectId(dataset_id)}, - # {"$or": [{"creator": current_user}, {"user_ids": current_user}]}, - # ] - # } - # ) - # ) is not None: - # authorization = AuthorizationDB.from_mongo(authorization_q) - # return authorization - if authorization is None: + + if ( + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ), + ) + ) is None: raise HTTPException( status_code=404, detail=f"No authorization found for dataset: {dataset_id}" ) else: - return authorization + return auth_db @router.get("/datasets/{dataset_id}/role/viewer") @@ -174,34 +158,24 @@ async def set_dataset_group_role( allow: bool = 
Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: group = GroupOut.from_mongo(group_q) # First, remove any existing role the group has on the dataset await remove_dataset_group_role(dataset_id, group_id, db, user_id, allow) - auth_db = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role, - ) - # if ( - # auth_q := await db["authorization"].find_one( - # {"dataset_id": ObjectId(dataset_id), "role": role} - # ) - # ) is not None: - # Update existing role entry for this dataset - # auth_db = AuthorizationDB.from_mongo(auth_q) - if auth_db is not None: + if ( + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) + ) is not None: if group_id not in auth_db.group_ids: auth_db.group_ids.append(ObjectId(group_id)) for u in group.users: auth_db.user_ids.append(u.user.email) - await auth_db.save() - # await db["authorization"].replace_one( - # {"_id": auth_db.id}, auth_db.to_mongo() - # ) + await auth_db.replace() return auth_db else: # Create new role entry for this dataset @@ -214,8 +188,7 @@ async def set_dataset_group_role( role=role, group_ids=[PyObjectId(group_id)], user_ids=user_ids, - ).save() - # await db["authorization"].insert_one(auth_db.to_mongo()) + ).insert() return auth_db else: raise HTTPException(status_code=404, detail=f"Group {group_id} not found") @@ -236,8 +209,7 @@ async def set_dataset_user_role( ): """Assign a single user a specific role for a dataset.""" - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (user_q := await db["users"].find_one({"email": username})) is not None: # First, remove any existing role the user has on the dataset await remove_dataset_user_role(dataset_id, username, db, user_id, allow) @@ -273,8 +245,7 @@ async def set_dataset_user_role( dataset_id=PyObjectId(dataset_id), role=role, user_ids=[username], - ).save() - # await db["authorization"].insert_one(auth_db.to_mongo()) + ).insert() return auth_db else: @@ -296,35 +267,22 @@ async def remove_dataset_group_role( ): """Remove any role the group has with a specific dataset.""" - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( group_q := await db["groups"].find_one({"_id": ObjectId(group_id)}) ) is not None: group = GroupOut.from_mongo(group_q) - auth_db = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.group_ids == group_id, - ) - # if ( - # auth_q := await db["authorization"].find_one( - # { - # "dataset_id": ObjectId(dataset_id), - # "group_ids": ObjectId(group_id), - # } - # ) - # ) is not None: - # # Remove group from affected authorizations - # auth_db = AuthorizationDB.from_mongo(auth_q) - if auth_db is not None: + if ( + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.group_ids == group_id, + ) + ) is not None: auth_db.group_ids.remove(PyObjectId(group_id)) for u in group.users: 
if u.user.email in auth_db.user_ids: auth_db.user_ids.remove(u.user.email) - await auth_db.save() - # await db["authorization"].replace_one( - # {"_id": auth_db.id}, auth_db.to_mongo() - # ) + await auth_db.replace() return auth_db else: raise HTTPException(status_code=404, detail=f"Group {group_id} not found") @@ -345,25 +303,16 @@ async def remove_dataset_user_role( ): """Remove any role the user has with a specific dataset.""" - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (user_q := await db["users"].find_one({"email": username})) is not None: - auth_db = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username, - ) - # if ( - # auth_q := await db["authorization"].find_one( - # {"dataset_id": ObjectId(dataset_id), "user_ids": username} - # ) - # ) is not None: - # Remove user from affected authorizations - # auth_db = AuthorizationDB.from_mongo(auth_q) - if auth_db is not None: - auth_db.user_ids.remove(username) - await db["authorization"].replace_one( - {"_id": auth_db.id}, auth_db.to_mongo() + if ( + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, ) + ) is not None: + auth_db.user_ids.remove(username) + await auth_db.replace() return auth_db else: raise HTTPException(status_code=404, detail=f"User {username} not found") @@ -378,12 +327,11 @@ async def get_dataset_roles( allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" - dataset = await DatasetDB.find_one(DatasetDB.id == ObjectId(dataset_id)) - if dataset: + if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: roles = DatasetRoles(dataset_id=str(dataset.id)) - async for auth_q in db["authorization"].find( - {"dataset_id": ObjectId(dataset_id)} + async for auth_q in AuthorizationDB.find( + AuthorizationDB.dataset_id == ObjectId(dataset_id) ): auth = AuthorizationOut.from_mongo(auth_q) From c4cda54ddbecf7b86807e071c5b5904d0e163e2b Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Wed, 10 May 2023 09:50:45 -0500 Subject: [PATCH 32/32] fix init of listener views --- backend/app/main.py | 12 ++ backend/app/models/authorization.py | 16 +- backend/app/models/listeners.py | 166 ++++++++-------- backend/app/rabbitmq/listeners.py | 2 +- backend/app/rabbitmq/message_listener_sync.py | 6 +- backend/app/routers/datasets.py | 13 +- backend/app/routers/files.py | 8 +- backend/app/routers/jobs.py | 16 +- backend/app/routers/listeners.py | 51 +++-- .../src/openapi/v2/models/EventListenerJob.ts | 32 ++-- .../src/openapi/v2/services/JobsService.ts | 180 +++++++++--------- frontend/src/types/data.ts | 8 +- 12 files changed, 258 insertions(+), 252 deletions(-) diff --git a/backend/app/main.py b/backend/app/main.py index 57b10be82..5e68460fe 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -12,6 +12,13 @@ from app.models.authorization import AuthorizationDB from app.models.datasets import DatasetDB, DatasetDBViewList from app.models.feeds import FeedDB +from app.models.listeners import ( + EventListenerDB, + EventListenerJobDB, + EventListenerJobUpdateDB, + EventListenerJobViewList, + EventListenerJobUpdateViewList, +) from app.models.metadata import MetadataDB, MetadataDefinitionDB from app.routers import ( folders, @@ -175,6 +182,11 @@ async def 
startup_beanie(): MetadataDB, MetadataDefinitionDB, FeedDB, + EventListenerDB, + EventListenerJobDB, + EventListenerJobUpdateDB, + EventListenerJobViewList, + EventListenerJobUpdateViewList, ], recreate_views=True, ) diff --git a/backend/app/models/authorization.py b/backend/app/models/authorization.py index cf9e8ea08..110c19aea 100644 --- a/backend/app/models/authorization.py +++ b/backend/app/models/authorization.py @@ -35,10 +35,17 @@ class Config: use_enum_values = True -class AuthorizationDB(MongoModel, AuthorizationBase): +class Provenance: pass +class AuthorizationDB(Document, AuthorizationBase, Provenance): + """The creator of the Authorization object should also be the creator of the dataset itself.""" + + class Settings: + name = "authorization" + + class AuthorizationOut(AuthorizationDB): pass @@ -75,10 +82,3 @@ class Provenance(BaseModel): creator: EmailStr created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) - - -class AuthorizationDB(Document, AuthorizationBase, Provenance): - """The creator of the Authorization object should also be the creator of the dataset itself.""" - - class Settings: - name = "authorization" diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 1297e2632..41a4c1e81 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -7,9 +7,9 @@ from pydantic import Field, BaseModel, AnyUrl from app.config import settings +from app.models.authorization import AuthorizationDB from app.models.mongomodel import MongoDBRef from app.models.pyobjectid import PyObjectId -from app.models.authorization import AuthorizationDB from app.models.users import UserOut @@ -67,6 +67,7 @@ class LegacyEventListenerIn(ExtractorInfo): class EventListenerDB(Document, EventListenerBase): """EventListeners have a name, version, author, description, and optionally properties where extractor_info will be saved.""" + id: PydanticObjectId = Field(None, alias="_id") creator: Optional[UserOut] = None created: datetime = Field(default_factory=datetime.now) modified: datetime = Field(default_factory=datetime.now) @@ -110,10 +111,7 @@ class EventListenerJobStatus(str, Enum): RESUBMITTED = "RESUBMITTED" -class EventListenerJob(Document): - """This summarizes a submission to an extractor. All messages from that extraction should include this job's ID.""" - - id: PydanticObjectId = Field(None, alias="_id") +class EventListenerJobBase(BaseModel): listener_id: str resource_ref: MongoDBRef creator: UserOut @@ -130,14 +128,18 @@ class Config: # required for Enum to properly work use_enum_values = True + +class EventListenerJobDB(Document, EventListenerJobBase): + """This summarizes a submission to an extractor. 
All messages from that extraction should include this job's ID.""" + + id: PydanticObjectId = Field(None, alias="_id") + class Settings: name = "listener_jobs" indexes = [ - [ - ("resource_ref.resource_id", PyObjectId), - ("listener_id", pymongo.TEXT), - ("status", pymongo.TEXT), - ], + ("resource_ref.resource_id", pymongo.TEXT), + ("listener_id", pymongo.TEXT), + ("status", pymongo.TEXT), ] @@ -170,7 +172,7 @@ class EventListenerDatasetJobMessage(BaseModel): job_id: str -class EventListenerJobUpdate(Document): +class EventListenerJobUpdateBase(BaseModel): """This is a status update message coming from the extractors back to Clowder.""" id: PydanticObjectId = Field(None, alias="_id") @@ -178,6 +180,8 @@ class EventListenerJobUpdate(Document): timestamp: datetime = Field(default_factory=datetime.utcnow) status: str + +class EventListenerJobUpdateDB(Document, EventListenerJobUpdateBase): class Settings: name = "listener_job_updates" indexes = [ @@ -188,7 +192,7 @@ class Settings: ] -class EventListenerJobViewList(View, EventListenerJob): +class EventListenerJobViewList(View, EventListenerJobBase): """Get associated resource information for each job""" # FIXME This seems to be required to return _id. Otherwise _id is null in the response. @@ -199,7 +203,7 @@ class EventListenerJobViewList(View, EventListenerJob): auth: List[AuthorizationDB] class Settings: - source = EventListenerJob + source = EventListenerJobDB name = "listener_jobs_view" pipeline = [ { @@ -255,7 +259,7 @@ class Settings: # cache_capacity = 5 -class EventListenerJobUpdateViewList(View, EventListenerJob): +class EventListenerJobUpdateViewList(View, EventListenerJobUpdateBase): """Get associated resource information for each job update""" # FIXME This seems to be required to return _id. Otherwise _id is null in the response. 
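The job views in this file wrap MongoDB aggregation pipelines, and init_beanie(recreate_views=True) in main.py above rebuilds them at startup. A minimal sketch of the View mechanics, with an assumed Job document and view name in place of the real models:

    from beanie import Document, View


    class Job(Document):
        status: str

        class Settings:
            name = "jobs"


    class FinishedJobs(View):
        # Views are read-only projections that MongoDB computes from the
        # source collection; this pipeline keeps only succeeded jobs.
        status: str

        class Settings:
            source = Job                  # collection the view reads from
            name = "finished_jobs_view"   # name of the MongoDB view to create
            pipeline = [
                {"$match": {"status": "SUCCEEDED"}},
                {"$project": {"status": 1}},
            ]
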
@@ -266,78 +270,76 @@ class EventListenerJobUpdateViewList(View, EventListenerJob): auth: List[AuthorizationDB] class Settings: - source = EventListenerJob + source = EventListenerJobUpdateDB name = "listener_jobs_view" - pipeline = ( - [ - { - "$lookup": { # Equality Match - "from": "listener_jobs", - "localField": "job_id", - "foreignField": "_id", - "as": "listener_job_details", - } - }, - { - "$facet": { - "extraction_on_dataset": [ - { - "$match": { - "listener_job_details.resource_ref.collection": { - "$eq": "dataset" - } - } - }, - { - "$lookup": { - "from": "authorization", - "localField": "listener_job_details.resource_ref.resource_id", - "foreignField": "dataset_id", - "as": "auth", - } - }, - ], - "extraction_on_file": [ - { - "$match": { - "listener_job_details.resource_ref.collection": { - "$eq": "file" - } - } - }, - { - "$lookup": { - "from": "files", - "localField": "listener_job_details.resource_ref.resource_id", - "foreignField": "_id", - "as": "file_details", + pipeline = [ + { + "$lookup": { # Equality Match + "from": "listener_jobs", + "localField": "job_id", + "foreignField": "_id", + "as": "listener_job_details", + } + }, + { + "$facet": { + "extraction_on_dataset": [ + { + "$match": { + "listener_job_details.resource_ref.collection": { + "$eq": "dataset" } - }, - { - "$lookup": { - "from": "authorization", - "localField": "file_details.dataset_id", - "foreignField": "dataset_id", - "as": "auth", + } + }, + { + "$lookup": { + "from": "authorization", + "localField": "listener_job_details.resource_ref.resource_id", + "foreignField": "dataset_id", + "as": "auth", + } + }, + ], + "extraction_on_file": [ + { + "$match": { + "listener_job_details.resource_ref.collection": { + "$eq": "file" } - }, - ], - } - }, - { - "$project": { - "all": { - "$concatArrays": [ - "$extraction_on_dataset", - "$extraction_on_file", - ] - } + } + }, + { + "$lookup": { + "from": "files", + "localField": "listener_job_details.resource_ref.resource_id", + "foreignField": "_id", + "as": "file_details", + } + }, + { + "$lookup": { + "from": "authorization", + "localField": "file_details.dataset_id", + "foreignField": "dataset_id", + "as": "auth", + } + }, + ], + } + }, + { + "$project": { + "all": { + "$concatArrays": [ + "$extraction_on_dataset", + "$extraction_on_file", + ] } - }, - {"$unwind": "$all"}, - {"$replaceRoot": {"newRoot": "$all"}}, - ], - ) + } + }, + {"$unwind": "$all"}, + {"$replaceRoot": {"newRoot": "$all"}}, + ] # Needs fix to work https://github.com/roman-right/beanie/pull/521 # use_cache = True # cache_expiration_time = timedelta(seconds=10) diff --git a/backend/app/rabbitmq/listeners.py b/backend/app/rabbitmq/listeners.py index 33543eda5..be3082b04 100644 --- a/backend/app/rabbitmq/listeners.py +++ b/backend/app/rabbitmq/listeners.py @@ -15,7 +15,7 @@ from app.models.datasets import DatasetOut from app.models.users import UserOut from app.models.listeners import ( - EventListenerJob, + EventListenerJobDB, EventListenerDB, EventListenerJobMessage, EventListenerDatasetJobMessage, diff --git a/backend/app/rabbitmq/message_listener_sync.py b/backend/app/rabbitmq/message_listener_sync.py index 5212ecb88..783bb91ee 100644 --- a/backend/app/rabbitmq/message_listener_sync.py +++ b/backend/app/rabbitmq/message_listener_sync.py @@ -12,9 +12,9 @@ from app.config import settings from app.models.config import ConfigEntryDB, ConfigEntryOut from app.models.listeners import ( - EventListenerJob, + EventListenerJobDB, EventListenerDB, - EventListenerJobUpdate, + EventListenerJobUpdateDB, 
EventListenerJobStatus, ) @@ -123,7 +123,7 @@ def callback(ch, method, properties, body): job.save() # Add latest message to the job updates - event_msg = EventListenerJobUpdate( + event_msg = EventListenerJobUpdateDB( job_id=job_id, status=cleaned_msg, timestamp=timestamp ) event_msg.save() diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 37d197463..bb3029ed4 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -9,6 +9,7 @@ from typing import List, Optional, Union from beanie import PydanticObjectId +from beanie.operators import Or from bson import ObjectId from bson import json_util from elasticsearch import Elasticsearch @@ -243,12 +244,10 @@ async def get_datasets( ).to_list() else: return await DatasetDBViewList.find( - { - "$or": [ - {"author.email": user_id}, - {"auth": {"$elemMatch": {"user_ids": user_id}}}, - ] - }, + Or( + DatasetDBViewList.creator.email == user_id, + DatasetDBViewList.auth.user_ids == user_id, + ), sort=("created", DESCENDING), skip=skip, limit=limit, @@ -372,7 +371,7 @@ async def edit_dataset( async def patch_dataset( dataset_id: str, dataset_info: DatasetPatch, - user=Depends(get_current_user()), + user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), ): diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index b535d9f65..c3b70cb5e 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -25,7 +25,7 @@ from app.models.files import FileOut, FileVersion, FileContentType, FileDB from app.models.metadata import MetadataDB from app.models.users import UserOut -from app.rabbitmq.listeners import submit_file_job, EventListenerJob +from app.rabbitmq.listeners import submit_file_job, EventListenerJobDB from app.routers.feeds import check_feed_listeners from app.search.connect import ( insert_record, @@ -57,9 +57,9 @@ async def _resubmit_file_extractors( """ resubmitted_jobs = [] - jobs = await EventListenerJob.find( - EventListenerJob.resource_ref.resource_id == ObjectId(file.id), - EventListenerJob.resource_ref.version == file.version_num - 1, + jobs = await EventListenerJobDB.find( + EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), + EventListenerJobDB.resource_ref.version == file.version_num - 1, ) for job in jobs: resubmitted_job = {"listener_id": job.listener_id, "parameters": job.parameters} diff --git a/backend/app/routers/jobs.py b/backend/app/routers/jobs.py index 85da839f3..c0d009f42 100644 --- a/backend/app/routers/jobs.py +++ b/backend/app/routers/jobs.py @@ -8,8 +8,8 @@ from app import dependencies from app.models.listeners import ( - EventListenerJob, - EventListenerJobUpdate, + EventListenerJobDB, + EventListenerJobUpdateDB, EventListenerJobViewList, ) from app.keycloak_auth import get_current_user, get_user, get_current_username @@ -17,7 +17,7 @@ router = APIRouter() -@router.get("", response_model=List[EventListenerJob]) +@router.get("", response_model=List[EventListenerJobDB]) async def get_all_job_summary( current_user_id=Depends(get_user), listener_id: Optional[str] = None, @@ -80,12 +80,12 @@ async def get_all_job_summary( ) -@router.get("/{job_id}/summary", response_model=EventListenerJob) +@router.get("/{job_id}/summary", response_model=EventListenerJobDB) async def get_job_summary( job_id: str, user=Depends(get_current_username), ): - job = await EventListenerJob.find_one(EventListenerJob.id == ObjectId(job_id)) + job = await 
EventListenerJobDB.find_one(EventListenerJobDB.id == ObjectId(job_id)) if job: return job raise HTTPException(status_code=404, detail=f"Job {job_id} not found") @@ -96,10 +96,10 @@ async def get_job_updates( job_id: str, user=Depends(get_current_username), ): - job = await EventListenerJob.find_one(EventListenerJob.id == ObjectId(job_id)) + job = await EventListenerJobDB.find_one(EventListenerJobDB.id == ObjectId(job_id)) if job: # TODO: Should this also return the job summary data since we just queried it here? - return await EventListenerJobUpdate.find( - EventListenerJobUpdate.job_id == job_id + return await EventListenerJobUpdateDB.find( + EventListenerJobUpdateDB.job_id == job_id ) raise HTTPException(status_code=404, detail=f"Job {job_id} not found") diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py index 8c02d30eb..00c1ac232 100644 --- a/backend/app/routers/listeners.py +++ b/backend/app/routers/listeners.py @@ -13,7 +13,7 @@ from app.dependencies import get_db from app.keycloak_auth import get_user, get_current_user, get_current_username -from app.models.config import ConfigEntryDB, ConfigEntryOut +from app.models.config import ConfigEntryDB from app.models.feeds import FeedDB, FeedListener from app.models.listeners import ( ExtractorInfo, @@ -32,9 +32,9 @@ async def _process_incoming_v1_extractor_info( - extractor_name: str, - extractor_id: str, - process: dict, + extractor_name: str, + extractor_id: str, + process: dict, ): """Return FeedDB object given v1 extractor info.""" if "file" in process: @@ -73,8 +73,8 @@ async def _process_incoming_v1_extractor_info( @router.get("/instance") async def get_instance_id( - user=Depends(get_current_user), - db: MongoClient = Depends(get_db), + user=Depends(get_current_user), + db: MongoClient = Depends(get_db), ): instance_id = await ConfigEntryDB.find_one({ConfigEntryDB.key == "instance_id"}) if instance_id: @@ -93,8 +93,8 @@ async def get_instance_id( @router.post("", response_model=EventListenerOut) async def save_listener( - listener_in: EventListenerIn, - user=Depends(get_current_user), + listener_in: EventListenerIn, + user=Depends(get_current_user), ): """Register a new Event Listener with the system.""" listener = EventListenerDB(**listener_in.dict(), creator=user) @@ -105,8 +105,8 @@ async def save_listener( @legacy_router.post("", response_model=EventListenerOut) async def save_legacy_listener( - legacy_in: LegacyEventListenerIn, - user=Depends(get_current_user), + legacy_in: LegacyEventListenerIn, + user=Depends(get_current_user), ): """This will take a POST with Clowder v1 extractor_info included, and convert/update to a v2 Listener.""" listener_properties = ExtractorInfo(**legacy_in.dict()) @@ -142,7 +142,7 @@ async def save_legacy_listener( @router.get("/search", response_model=List[EventListenerOut]) async def search_listeners( - text: str = "", skip: int = 0, limit: int = 2, user=Depends(get_current_username) + text: str = "", skip: int = 0, limit: int = 2, user=Depends(get_current_username) ): """Search all Event Listeners in the db based on text. 
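One pitfall remains visible in the get_instance_id context above: find_one({ConfigEntryDB.key == "instance_id"}) wraps the expression in braces, which Python parses as a set literal rather than a Mongo filter. A sketch of the two forms Beanie does accept, using an assumed ConfigEntry document:

    from typing import Optional

    from beanie import Document


    class ConfigEntry(Document):
        key: str
        value: str


    async def lookup(key: str) -> Optional[ConfigEntry]:
        # Either a field expression or a plain filter dict works; the
        # brace-wrapped expression above is neither.
        by_expression = await ConfigEntry.find_one(ConfigEntry.key == key)
        by_dict = await ConfigEntry.find_one({"key": key})
        return by_expression or by_dict
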
@@ -179,7 +179,7 @@ async def list_default_labels(user=Depends(get_current_username)): async def get_listener(listener_id: str, user=Depends(get_current_username)): """Return JSON information about an Event Listener if it exists.""" if ( - listener := EventListenerDB.find_one(PydanticObjectId(listener_id)) + listener := EventListenerDB.find_one(PydanticObjectId(listener_id)) ) is not None: return EventListenerOut(**listener.dict()) raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") @@ -187,11 +187,11 @@ async def get_listener(listener_id: str, user=Depends(get_current_username)): @router.get("", response_model=List[EventListenerOut]) async def get_listeners( - user_id=Depends(get_current_username), - skip: int = 0, - limit: int = 2, - category: Optional[str] = None, - label: Optional[str] = None, + user_id=Depends(get_current_username), + skip: int = 0, + limit: int = 2, + category: Optional[str] = None, + label: Optional[str] = None, ): """Get a list of all Event Listeners in the db. @@ -207,19 +207,14 @@ async def get_listeners( if label: query.append(EventListenerDB.properties.default_labels == label) - return ( - await EventListenerDB.find(**query) - .skip(skip) - .limit(limit) - .to_list(length=limit) - ) + return await EventListenerDB.find(*query, skip=skip, limit=limit).to_list() @router.put("/{listener_id}", response_model=EventListenerOut) async def edit_listener( - listener_id: str, - listener_in: EventListenerIn, - user_id=Depends(get_user), + listener_id: str, + listener_in: EventListenerIn, + user_id=Depends(get_user), ): """Update the information about an existing Event Listener.. @@ -243,8 +238,8 @@ async def edit_listener( @router.delete("/{listener_id}") async def delete_listener( - listener_id: str, - user=Depends(get_current_username), + listener_id: str, + user=Depends(get_current_username), ): """Remove an Event Listener from the database. Will not clear event history for the listener.""" listener = EventListenerDB.find(EventListenerDB.id == ObjectId(listener_id)) diff --git a/frontend/src/openapi/v2/models/EventListenerJob.ts b/frontend/src/openapi/v2/models/EventListenerJob.ts index b32ba051a..c0a0c6a6c 100644 --- a/frontend/src/openapi/v2/models/EventListenerJob.ts +++ b/frontend/src/openapi/v2/models/EventListenerJob.ts @@ -2,23 +2,23 @@ /* tslint:disable */ /* eslint-disable */ -import type { MongoDBRef } from './MongoDBRef'; -import type { UserOut } from './UserOut'; +import type { MongoDBRef } from "./MongoDBRef"; +import type { UserOut } from "./UserOut"; /** * This summarizes a submission to an extractor. All messages from that extraction should include this job's ID. 
*/ -export type EventListenerJob = { - id?: string; - listener_id: string; - resource_ref: MongoDBRef; - creator: UserOut; - parameters?: any; - created?: string; - started?: string; - updated?: string; - finished?: string; - duration?: number; - latest_message?: string; - status?: string; -} +export type EventListenerJobDB = { + id?: string; + listener_id: string; + resource_ref: MongoDBRef; + creator: UserOut; + parameters?: any; + created?: string; + started?: string; + updated?: string; + finished?: string; + duration?: number; + latest_message?: string; + status?: string; +}; diff --git a/frontend/src/openapi/v2/services/JobsService.ts b/frontend/src/openapi/v2/services/JobsService.ts index 25c8aebcf..f6d966e9d 100644 --- a/frontend/src/openapi/v2/services/JobsService.ts +++ b/frontend/src/openapi/v2/services/JobsService.ts @@ -1,98 +1,96 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { EventListenerJob } from '../models/EventListenerJob'; -import type { CancelablePromise } from '../core/CancelablePromise'; -import { request as __request } from '../core/request'; +import type { EventListenerJob } from "../models/EventListenerJob"; +import type { CancelablePromise } from "../core/CancelablePromise"; +import { request as __request } from "../core/request"; export class JobsService { + /** + * Get All Job Summary + * Get a list of all jobs from the db. + * Arguments: + * listener_id -- listener id + * status -- filter by status + * user_id -- filter by user id + * file_id -- filter by file id + * dataset_id -- filter by dataset id + * created: Optional[datetime] = None, + * skip -- number of initial records to skip (i.e. for pagination) + * limit -- restrict number of records to be returned (i.e. for pagination) + * @param listenerId + * @param status + * @param userId + * @param fileId + * @param datasetId + * @param created + * @param skip + * @param limit + * @returns EventListenerJob Successful Response + * @throws ApiError + */ + public static getAllJobSummaryApiV2JobsGet( + listenerId?: string, + status?: string, + userId?: string, + fileId?: string, + datasetId?: string, + created?: string, + skip?: number, + limit: number = 2 + ): CancelablePromise> { + return __request({ + method: "GET", + path: `/api/v2/jobs`, + query: { + listener_id: listenerId, + status: status, + user_id: userId, + file_id: fileId, + dataset_id: datasetId, + created: created, + skip: skip, + limit: limit, + }, + errors: { + 422: `Validation Error`, + }, + }); + } - /** - * Get All Job Summary - * Get a list of all jobs from the db. - * Arguments: - * listener_id -- listener id - * status -- filter by status - * user_id -- filter by user id - * file_id -- filter by file id - * dataset_id -- filter by dataset id - * created: Optional[datetime] = None, - * skip -- number of initial records to skip (i.e. for pagination) - * limit -- restrict number of records to be returned (i.e. 
for pagination) - * @param listenerId - * @param status - * @param userId - * @param fileId - * @param datasetId - * @param created - * @param skip - * @param limit - * @returns EventListenerJob Successful Response - * @throws ApiError - */ - public static getAllJobSummaryApiV2JobsGet( - listenerId?: string, - status?: string, - userId?: string, - fileId?: string, - datasetId?: string, - created?: string, - skip?: number, - limit: number = 2, - ): CancelablePromise> { - return __request({ - method: 'GET', - path: `/api/v2/jobs`, - query: { - 'listener_id': listenerId, - 'status': status, - 'user_id': userId, - 'file_id': fileId, - 'dataset_id': datasetId, - 'created': created, - 'skip': skip, - 'limit': limit, - }, - errors: { - 422: `Validation Error`, - }, - }); - } + /** + * Get Job Summary + * @param jobId + * @returns EventListenerJobDB Successful Response + * @throws ApiError + */ + public static getJobSummaryApiV2JobsJobIdSummaryGet( + jobId: string + ): CancelablePromise { + return __request({ + method: "GET", + path: `/api/v2/jobs/${jobId}/summary`, + errors: { + 422: `Validation Error`, + }, + }); + } - /** - * Get Job Summary - * @param jobId - * @returns EventListenerJob Successful Response - * @throws ApiError - */ - public static getJobSummaryApiV2JobsJobIdSummaryGet( - jobId: string, - ): CancelablePromise { - return __request({ - method: 'GET', - path: `/api/v2/jobs/${jobId}/summary`, - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Get Job Updates - * @param jobId - * @returns any Successful Response - * @throws ApiError - */ - public static getJobUpdatesApiV2JobsJobIdUpdatesGet( - jobId: string, - ): CancelablePromise { - return __request({ - method: 'GET', - path: `/api/v2/jobs/${jobId}/updates`, - errors: { - 422: `Validation Error`, - }, - }); - } - -} \ No newline at end of file + /** + * Get Job Updates + * @param jobId + * @returns any Successful Response + * @throws ApiError + */ + public static getJobUpdatesApiV2JobsJobIdUpdatesGet( + jobId: string + ): CancelablePromise { + return __request({ + method: "GET", + path: `/api/v2/jobs/${jobId}/updates`, + errors: { + 422: `Validation Error`, + }, + }); + } +} diff --git a/frontend/src/types/data.ts b/frontend/src/types/data.ts index 23dcad0b6..b840dca31 100644 --- a/frontend/src/types/data.ts +++ b/frontend/src/types/data.ts @@ -1,7 +1,7 @@ import { AuthorizationBase, DatasetRoles, - EventListenerJob, + EventListenerJobDB, FileOut as FileSummary, FileVersion, FolderOut, @@ -16,7 +16,7 @@ import { export interface Dataset { name: string; description: string; - _id:string; + _id: string; author: Author; created: string | Date; modified: string | Date; @@ -158,8 +158,8 @@ export interface ListenerState { listeners: Listener[]; categories: string[]; labels: string[]; - jobs: EventListenerJob[]; - currJobUpdates: EventListenerJob[]; + jobs: EventListenerJobDB[]; + currJobUpdates: EventListenerJobDB[]; currJobSummary: JobSummary[]; currJobId: string; }
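
For reference, the jobs endpoints wired through JobsService above can be exercised directly; a small illustrative client follows, where the base URL and bearer token are placeholder assumptions:

    import requests

    BASE = "http://localhost:8000/api/v2"
    HEADERS = {"Authorization": "Bearer <token>"}


    def list_jobs(listener_id=None, status=None, skip=0, limit=2):
        # Mirrors getAllJobSummaryApiV2JobsGet: all filters are optional
        # query parameters, and skip/limit paginate the results.
        params = {"skip": skip, "limit": limit}
        if listener_id is not None:
            params["listener_id"] = listener_id
        if status is not None:
            params["status"] = status
        resp = requests.get(f"{BASE}/jobs", params=params, headers=HEADERS)
        resp.raise_for_status()
        return resp.json()


    def job_detail(job_id):
        # Summary and updates are separate calls, matching the /summary
        # and /updates routes defined in jobs.py above.
        summary = requests.get(f"{BASE}/jobs/{job_id}/summary", headers=HEADERS)
        updates = requests.get(f"{BASE}/jobs/{job_id}/updates", headers=HEADERS)
        summary.raise_for_status()
        updates.raise_for_status()
        return summary.json(), updates.json()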