Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 58 additions & 12 deletions backend/app/routers/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,8 +229,15 @@ async def get_datasets(
datasets = []
if mine:
for doc in (
await db["datasets"]
.find({"author.email": user_id})
await db["datasets_view"]
.find(
{
"$and": [
{"author.email": user_id},
{"auth": {"$elemMatch": {"user_id": {"$eq": user_id}}}},
]
}
)
.sort([("created", pymongo.DESCENDING)])
.skip(skip)
.limit(limit)
Expand All @@ -239,8 +246,15 @@ async def get_datasets(
datasets.append(DatasetOut.from_mongo(doc))
else:
for doc in (
await db["datasets"]
.find()
await db["datasets_view"]
.find(
{
"$or": [
{"author.email": user_id},
{"auth": {"$elemMatch": {"user_id": {"$eq": user_id}}}},
]
}
)
.sort([("created", pymongo.DESCENDING)])
.skip(skip)
.limit(limit)
Expand All @@ -263,18 +277,29 @@ async def get_dataset(dataset_id: str, db: MongoClient = Depends(dependencies.ge
async def get_dataset_files(
dataset_id: str,
folder_id: Optional[str] = None,
user_id=Depends(get_user),
db: MongoClient = Depends(dependencies.get_db),
skip: int = 0,
limit: int = 10,
):
files = []
if folder_id is not None:
for f in (
await db["files"]
await db["files_view"]
.find(
{
"dataset_id": ObjectId(dataset_id),
"folder_id": ObjectId(folder_id),
"$and": [
{
"dataset_id": ObjectId(dataset_id),
"folder_id": ObjectId(folder_id),
},
{
"$or": [
{"creator.email": user_id},
{"auth": {"$elemMatch": {"user_id": {"$eq": user_id}}}},
]
},
]
}
)
.skip(skip)
Expand All @@ -284,11 +309,21 @@ async def get_dataset_files(
files.append(FileOut.from_mongo(f))
else:
for f in (
await db["files"]
await db["files_view"]
.find(
{
"dataset_id": ObjectId(dataset_id),
"folder_id": None,
"$and": [
{
"dataset_id": ObjectId(dataset_id),
"folder_id": None,
},
{
"$or": [
{"creator.email": user_id},
{"auth": {"$elemMatch": {"user_id": {"$eq": user_id}}}},
]
},
]
}
)
.skip(skip)
Expand Down Expand Up @@ -468,6 +503,7 @@ async def add_folder(
async def get_dataset_folders(
dataset_id: str,
parent_folder: Optional[str] = None,
user_id=Depends(get_user),
db: MongoClient = Depends(dependencies.get_db),
):
folders = []
Expand All @@ -479,8 +515,18 @@ async def get_dataset_folders(
else:
async for f in db["folders"].find(
{
"dataset_id": ObjectId(dataset_id),
"parent_folder": ObjectId(parent_folder),
"$and": [
{
"dataset_id": ObjectId(dataset_id),
"parent_folder": ObjectId(parent_folder),
},
{
"$or": [
{"author.email": user_id},
{"auth": {"$elemMatch": {"user_id": {"$eq": user_id}}}},
]
},
]
}
):
folders.append(FolderDB.from_mongo(f))
Expand Down
21 changes: 13 additions & 8 deletions backend/app/routers/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,15 @@

from app import dependencies
from app.models.listeners import EventListenerJob, EventListenerJobUpdate
from app.keycloak_auth import get_current_user
from app.keycloak_auth import get_current_user, get_user

router = APIRouter()


@router.get("", response_model=List[EventListenerJob])
async def get_all_job_summary(
current_user_id=Depends(get_user),
db: MongoClient = Depends(dependencies.get_db),
user=Depends(get_current_user),
listener_id: Optional[str] = None,
status: Optional[str] = None,
user_id: Optional[str] = None,
Expand All @@ -39,7 +39,14 @@ async def get_all_job_summary(
limit -- restrict number of records to be returned (i.e. for pagination)
"""
jobs = []
filters = []
filters = [
{
"$or": [
{"creator.email": current_user_id},
{"auth": {"$elemMatch": {"user_id": {"$eq": current_user_id}}}},
]
}
]
if listener_id is not None:
filters.append({"listener_id": listener_id})
if status is not None:
Expand All @@ -62,13 +69,11 @@ async def get_all_job_summary(
if dataset_id is not None:
filters.append({"resource_ref.collection": "dataset"})
filters.append({"resource_ref.resource_id": ObjectId(dataset_id)})
if len(filters) == 0:
query = {}
else:
query = {"$and": filters}

query = {"$and": filters}

for doc in (
await db["listener_jobs"]
await db["listener_jobs_view"]
.find(query)
.skip(skip)
.limit(limit)
Expand Down
1 change: 1 addition & 0 deletions docker-compose.dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ services:
- clowder2
volumes:
- mongo:/data/db
- ./scripts/mongoviews/mongo-init-dev.js:/docker-entrypoint-initdb.d/mongo-init.js:ro

minio1:
<<: *minio-common
Expand Down
4 changes: 3 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,9 @@ services:
- clowder2
volumes:
- mongo:/data/db
# environment:
- ./scripts/mongoviews/mongo-init.js:/docker-entrypoint-initdb.d/mongo-init.js:ro

# environment:
# MONGO_INITDB_ROOT_USERNAME: root
# MONGO_INITDB_ROOT_PASSWORD: example

Expand Down
14 changes: 14 additions & 0 deletions scripts/mongoviews/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# How to Import Views from Query Script

- Open Studio 3T
- Find the Views section and right click
- Select **Add Views**
![add_view](img/add_view.png)
- Select the collection you want to base your view on. For example, if you are importing **datasets_view.js**, you need to
select the **datasets** collection.
![base_collection](img/base_collection.png)
- In the new interface, click the folder icon and select the view query script. e.g. **datasets_view.js**
![import](img/buttons.png)
- Click the **triangle "run"** button to test.
- If everything looks good, click the **create view** button, save it with the name following `collection name` +
`_view` pattern.
23 changes: 23 additions & 0 deletions scripts/mongoviews/datasets_view.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// Source query for the "datasets_view" view: each dataset document is
// joined with its authorization records, exposed as an embedded "auth"
// array, so API queries can filter datasets by the requesting user's
// access rights in a single read.
db.getCollection("datasets").aggregate(
    [
        {
            // Pull in every authorization document whose dataset_id
            // matches this dataset's _id.
            $lookup: {
                from: "authorization",
                localField: "_id",
                foreignField: "dataset_id",
                as: "auth",
            },
        },
    ],
    // No aggregation options needed.
    {}
);
23 changes: 23 additions & 0 deletions scripts/mongoviews/files_view.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// Source query for the "files_view" view: each file document is joined
// with the authorization records of its parent dataset (matched on
// dataset_id), exposed as an embedded "auth" array so file listings can
// be filtered by the requesting user's dataset-level access rights.
db.getCollection("files").aggregate(
    [
        {
            // Attach every authorization document for the file's dataset.
            $lookup: {
                from: "authorization",
                localField: "dataset_id",
                foreignField: "dataset_id",
                as: "auth",
            },
        },
    ],
    // No aggregation options needed.
    {}
);
23 changes: 23 additions & 0 deletions scripts/mongoviews/folders_view.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// Source query for the "folders_view" view: each folder document is
// joined with the authorization records of its parent dataset (matched
// on dataset_id), exposed as an embedded "auth" array so folder listings
// can be filtered by the requesting user's dataset-level access rights.
db.getCollection("folders").aggregate(
    [
        {
            // Attach every authorization document for the folder's dataset.
            $lookup: {
                from: "authorization",
                localField: "dataset_id",
                foreignField: "dataset_id",
                as: "auth",
            },
        },
    ],
    // No aggregation options needed.
    {}
);
Binary file added scripts/mongoviews/img/add_view.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added scripts/mongoviews/img/base_collection.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added scripts/mongoviews/img/buttons.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added scripts/mongoviews/img/save.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading