@router.post("/{dataset_id}/filesMultiple", response_model=List[FileOut])
async def save_files(
    dataset_id: str,
    files: List[UploadFile],
    folder_id: Optional[str] = None,
    user=Depends(get_current_user),
    fs: Minio = Depends(dependencies.get_fs),
    es=Depends(dependencies.get_elasticsearchclient),
    rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq),
    allow: bool = Depends(Authorization("uploader")),
):
    """Upload multiple files to a dataset, optionally into a folder.

    Each file is stored via ``add_file_entry`` (Minio bytes, Elasticsearch
    index, RabbitMQ listener notification). Returns the list of created
    file records.

    Raises:
        HTTPException 401: the session user could not be resolved.
        HTTPException 404: the dataset or the target folder does not exist.
    """
    if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is None:
        raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")

    # The user and folder checks are loop-invariant: validate them once up
    # front instead of once per uploaded file. (The per-file check also
    # silently skipped the 401 for an empty `files` list.)
    if user is None:
        raise HTTPException(
            status_code=401,
            detail="User not found. Session might have expired.",
        )

    folder = None
    if folder_id is not None:
        folder = await FolderDB.get(PydanticObjectId(folder_id))
        if folder is None:
            raise HTTPException(
                status_code=404, detail=f"Folder {folder_id} not found"
            )

    files_added = []
    for file in files:
        new_file = FileDB(name=file.filename, creator=user, dataset_id=dataset.id)
        if folder is not None:
            new_file.folder_id = folder.id
        await add_file_entry(
            new_file,
            user,
            fs,
            es,
            rabbitmq_client,
            file.file,
            content_type=file.content_type,
        )
        files_added.append(new_file.dict())
    return files_added
= "testing file.txt" diff --git a/backend/app/tests/utils.py b/backend/app/tests/utils.py index 3173ccfdc..d12775cb9 100644 --- a/backend/app/tests/utils.py +++ b/backend/app/tests/utils.py @@ -33,8 +33,11 @@ "description": "a dataset is a container of files and metadata", } -filename_example = "test_upload.csv" -file_content_example = "year,location,count\n2023,Atlanta,4" +filename_example_1 = "test_upload1.csv" +file_content_example_1 = "year,location,count\n2023,Atlanta,4" + +filename_example_2 = "test_upload2.csv" +file_content_example_2 = "year,location,count\n2022,Seattle,2" listener_v2_example = { "name": "test.listener_v2_example", @@ -143,8 +146,8 @@ def upload_file( client: TestClient, headers: dict, dataset_id: str, - filename=filename_example, - content=file_content_example, + filename=filename_example_1, + content=file_content_example_1, ): """Uploads a dummy file (optionally with custom name/content) to a dataset and returns the JSON.""" with open(filename, "w") as tempf: @@ -161,6 +164,36 @@ def upload_file( return response.json() +def upload_files( + client: TestClient, + headers: dict, + dataset_id: str, + filenames=[filename_example_1, filename_example_2], + file_contents=[file_content_example_1, file_content_example_2], +): + """Uploads a dummy file (optionally with custom name/content) to a dataset and returns the JSON.""" + upload_files = [] + for i in range(0, len(filenames)): + with open(filenames[i], "w") as tempf: + tempf.write(file_contents[i]) + upload_files.append(filenames[i]) + files = [ + ("files", open(filename_example_1, "rb")), + ("files", open(filename_example_2, "rb")), + ] + response = client.post( + f"{settings.API_V2_STR}/datasets/{dataset_id}/filesMultiple", + headers=headers, + files=files, + ) + for f in upload_files: + os.remove(f) + assert response.status_code == 200 + json_response = response.json() + assert len(json_response) == 2 + return response.json() + + def create_folder( client: TestClient, headers: dict, diff 
export const CREATE_FILES = "CREATE_FILES";

export function createFiles(selectedDatasetId, selectedFiles, folderId) {
	return (dispatch) => {
		// The generated client expects a plain body object ({ files: File[] })
		// and builds the multipart form itself. Assigning a custom property on
		// a FormData instance (formData["files"] = ...) bypassed the FormData
		// API and only worked by accident of property enumeration.
		const formData = {
			files: selectedFiles ? Array.from(selectedFiles) : [],
		};

		return V2.DatasetsService.saveFilesApiV2DatasetsDatasetIdFilesMultiplePost(
			selectedDatasetId,
			formData,
			folderId
		)
			.then((files) => {
				dispatch({
					type: CREATE_FILES,
					files: files,
					receivedAt: Date.now(),
				});
			})
			.catch((reason) => {
				// Retry through the shared error handler (e.g. after token refresh).
				dispatch(
					handleErrors(
						reason,
						createFiles(selectedDatasetId, selectedFiles, folderId)
					)
				);
			});
	};
}

export const RESET_CREATE_FILES = "RESET_CREATE_FILES";

// Clears the "newly created files" state so the next upload starts clean.
export function resetFilesCreated() {
	return (dispatch) => {
		dispatch({
			type: RESET_CREATE_FILES,
			receivedAt: Date.now(),
		});
	};
}
import { Folder } from "@material-ui/icons"; @@ -29,6 +30,8 @@ export const NewMenu = (props: ActionsMenuProps): JSX.Element => { const [anchorEl, setAnchorEl] = React.useState(null); const [createFileOpen, setCreateFileOpen] = React.useState(false); + const [createMultipleFileOpen, setCreateMultipleFileOpen] = + React.useState(false); const [newFolder, setNewFolder] = React.useState(false); const handleCloseNewFolder = () => { @@ -58,6 +61,22 @@ export const NewMenu = (props: ActionsMenuProps): JSX.Element => { folderId={folderId} /> + { + setCreateMultipleFileOpen(false); + }} + fullWidth={true} + maxWidth="lg" + aria-labelledby="form-dialog" + > + + { Upload File + { + setCreateMultipleFileOpen(true); + handleOptionClose(); + }} + > + + + + Upload Multiple Files + { setNewFolder(true); diff --git a/frontend/src/components/files/UploadFileInputMultiple.tsx b/frontend/src/components/files/UploadFileInputMultiple.tsx new file mode 100644 index 000000000..3cbb9bf17 --- /dev/null +++ b/frontend/src/components/files/UploadFileInputMultiple.tsx @@ -0,0 +1,35 @@ +import React from "react"; + +import { Box, Input } from "@mui/material"; + +type UploadFileMultipleModalProps = { + setSelectedFiles: any; +}; + +// https://stackoverflow.com/questions/68213700/react-js-upload-multiple-files +export const UploadFileInputMultiple: React.FC = ( + props: UploadFileMultipleModalProps +) => { + const { setSelectedFiles } = props; + + const handleMultipleFileChange = (event) => { + // let tempFormData = new FormData(); + // for (let i = 0; i < event.target.files.length; i++) { + // tempFormData.append("files", event.target.files[i]); + // } + setSelectedFiles(event.target.files); + }; + + return ( + + + + ); +}; diff --git a/frontend/src/components/files/UploadFileMultiple.tsx b/frontend/src/components/files/UploadFileMultiple.tsx new file mode 100644 index 000000000..190f8eb25 --- /dev/null +++ b/frontend/src/components/files/UploadFileMultiple.tsx @@ -0,0 +1,248 @@ +import React, 
{ useEffect, useState } from "react"; + +import { + Box, + Button, + Grid, + Step, + StepContent, + StepLabel, + Stepper, + Typography, +} from "@mui/material"; +import { useDispatch, useSelector } from "react-redux"; +import { RootState } from "../../types/data"; +import { CreateMetadata } from "../metadata/CreateMetadata"; +import { + fetchMetadataDefinitions, + postFileMetadata, +} from "../../actions/metadata"; +import { MetadataIn } from "../../openapi/v2"; +import { useNavigate } from "react-router-dom"; +import { + createFiles as createFilesAction, + resetFilesCreated, +} from "../../actions/file"; + +import LoadingOverlay from "react-loading-overlay-ts"; +import { UploadFileInputMultiple } from "./UploadFileInputMultiple"; +import { fetchFolderPath } from "../../actions/folder"; +import { + fetchDatasetAbout, + fetchFilesInDataset, + fetchFoldersInDataset, +} from "../../actions/dataset"; + +type UploadFileMultipleProps = { + selectedDatasetId: string | undefined; + folderId: string | undefined; + setCreateMultipleFileOpen: any; +}; + +export const UploadFileMultiple: React.FC = ( + props: UploadFileMultipleProps +) => { + const { selectedDatasetId, folderId, setCreateMultipleFileOpen } = props; + const [selectedFiles, setSelectedFiles] = useState(null); + const [metadataRequestForms, setMetadataRequestForms] = useState({}); + const [allFilled, setAllFilled] = React.useState(false); + + const [loading, setLoading] = useState(false); + + const dispatch = useDispatch(); + // @ts-ignore + const getMetadatDefinitions = ( + name: string | null, + skip: number, + limit: number + ) => dispatch(fetchMetadataDefinitions(name, skip, limit)); + const createFileMetadata = ( + fileId: string | undefined, + metadata: MetadataIn + ) => dispatch(postFileMetadata(fileId, metadata)); + + const uploadFiles = ( + selectedDatasetId: string | undefined, + selectedFiles: File[] | undefined, + selectedFolderId: string | undefined + ) => + dispatch( + 
createFilesAction(selectedDatasetId, selectedFiles, selectedFolderId) + ); + + const getFolderPath = (folderId: string | null) => + dispatch(fetchFolderPath(folderId)); + const listFilesInDataset = ( + datasetId: string | undefined, + folderId: string | null, + skip: number | undefined, + limit: number | undefined + ) => dispatch(fetchFilesInDataset(datasetId, folderId, skip, limit)); + const listFoldersInDataset = ( + datasetId: string | undefined, + parentFolder: string | null, + skip: number | undefined, + limit: number | undefined + ) => dispatch(fetchFoldersInDataset(datasetId, parentFolder, skip, limit)); + const listDatasetAbout = (datasetId: string | undefined) => + dispatch(fetchDatasetAbout(datasetId)); + + const newFiles = useSelector((state: RootState) => state.dataset.newFiles); + const metadataDefinitionList = useSelector( + (state: RootState) => state.metadata.metadataDefinitionList + ); + + useEffect(() => { + getMetadatDefinitions(null, 0, 100); + }, []); + + const history = useNavigate(); + const checkIfFieldsAreRequired = () => { + let required = false; + + metadataDefinitionList.forEach((val, _) => { + if (val.fields[0].required) { + required = true; + } + }); + + return required; + }; + + const checkIfFieldsAreFilled = () => { + return metadataDefinitionList.every((val) => { + return val.fields.every((field) => { + return field.required + ? metadataRequestForms[val.name] !== undefined && + metadataRequestForms[val.name].content[field.name] !== + undefined && + metadataRequestForms[val.name].content[field.name] !== "" + : true; + }); + }); + }; + + // step 1 + const setMetadata = (metadata: any) => { + // TODO wrap this in to a function + setMetadataRequestForms((prevState) => { + // merge the contents field; e.g. 
lat lon + if (metadata.definition in prevState) { + const prevContent = prevState[metadata.definition].content; + metadata.content = { ...prevContent, ...metadata.content }; + } + return { ...prevState, [metadata.definition]: metadata }; + }); + }; + + useEffect(() => { + if (Object.keys(metadataRequestForms).length > 0) { + setAllFilled(checkIfFieldsAreFilled(metadataRequestForms)); + } else { + setAllFilled(false); + } + }, [metadataRequestForms]); + + // step + const [activeStep, setActiveStep] = useState(0); + const handleNext = () => { + setActiveStep((prevActiveStep) => prevActiveStep + 1); + }; + const handleBack = () => { + setActiveStep((prevActiveStep) => prevActiveStep - 1); + }; + + // finish button post dataset; dataset ID triggers metadata posting + const handleFinish = () => { + // Triggers spinner + setLoading(true); + + // create dataset + uploadFiles(selectedDatasetId, selectedFiles, folderId); + }; + + const handleFinishMultiple = () => { + setLoading(true); + uploadFiles(selectedDatasetId, selectedFiles, folderId); + }; + + useEffect(() => { + if (newFiles.length > 0) { + newFiles.map((file) => { + // post new metadata + Object.keys(metadataRequestForms).map((key) => { + createFileMetadata(file.id, metadataRequestForms[key]); + }); + }); + + // reset newFile so next upload can be done + dispatch(resetFilesCreated()); + setMetadataRequestForms({}); + + // Stop spinner + setLoading(false); + + // Redirect to the first file route with file Id and dataset id + history( + `/files/${newFiles[0].id}?dataset=${selectedDatasetId}&folder=${folderId}` + ); + } + }, [newFiles]); + + return ( + + + + {/**/} + {/*step 1 Metadata*/} + + Fill In Metadata + + Provide us the metadata about your file. + + + + {/*buttons*/} + + + + + + + + + {/* step 2 attach files */} + + Attach Files + + Upload files to the dataset. 
+ + + + + + + + + + + + + ); +}; diff --git a/frontend/src/openapi/v2/index.ts b/frontend/src/openapi/v2/index.ts index 2e3d17f12..8a469f2ab 100644 --- a/frontend/src/openapi/v2/index.ts +++ b/frontend/src/openapi/v2/index.ts @@ -14,6 +14,7 @@ export type { Body_create_dataset_from_zip_api_v2_datasets_createFromZip_post } export type { Body_get_dataset_metadata_api_v2_datasets__dataset_id__metadata_get } from './models/Body_get_dataset_metadata_api_v2_datasets__dataset_id__metadata_get'; export type { Body_get_file_metadata_api_v2_files__file_id__metadata_get } from './models/Body_get_file_metadata_api_v2_files__file_id__metadata_get'; export type { Body_save_file_api_v2_datasets__dataset_id__files_post } from './models/Body_save_file_api_v2_datasets__dataset_id__files_post'; +export type { Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post } from './models/Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post'; export type { Body_update_file_api_v2_files__file_id__put } from './models/Body_update_file_api_v2_files__file_id__put'; export type { ContentType } from './models/ContentType'; export type { DatasetBase } from './models/DatasetBase'; diff --git a/frontend/src/openapi/v2/models/Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post.ts b/frontend/src/openapi/v2/models/Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post.ts new file mode 100644 index 000000000..f64d443eb --- /dev/null +++ b/frontend/src/openapi/v2/models/Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post.ts @@ -0,0 +1,7 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +export type Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post = { + files: Array; +} diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 470b6bb8e..fe5605ba8 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ 
b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -3,6 +3,7 @@ /* eslint-disable */ import type { Body_create_dataset_from_zip_api_v2_datasets_createFromZip_post } from '../models/Body_create_dataset_from_zip_api_v2_datasets_createFromZip_post'; import type { Body_save_file_api_v2_datasets__dataset_id__files_post } from '../models/Body_save_file_api_v2_datasets__dataset_id__files_post'; +import type { Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post } from '../models/Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post'; import type { DatasetBase } from '../models/DatasetBase'; import type { DatasetIn } from '../models/DatasetIn'; import type { DatasetOut } from '../models/DatasetOut'; @@ -269,6 +270,33 @@ export class DatasetsService { }); } + /** + * Save Files + * @param datasetId + * @param formData + * @param folderId + * @returns FileOut Successful Response + * @throws ApiError + */ + public static saveFilesApiV2DatasetsDatasetIdFilesMultiplePost( + datasetId: string, + formData: Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post, + folderId?: string, + ): CancelablePromise> { + return __request({ + method: 'POST', + path: `/api/v2/datasets/${datasetId}/filesMultiple`, + query: { + 'folder_id': folderId, + }, + formData: formData, + mediaType: 'multipart/form-data', + errors: { + 422: `Validation Error`, + }, + }); + } + /** * Create Dataset From Zip * @param formData diff --git a/frontend/src/reducers/dataset.ts b/frontend/src/reducers/dataset.ts index ce0c798f6..f45c4bc53 100644 --- a/frontend/src/reducers/dataset.ts +++ b/frontend/src/reducers/dataset.ts @@ -14,8 +14,10 @@ import { } from "../actions/dataset"; import { CREATE_FILE, + CREATE_FILES, DELETE_FILE, RESET_CREATE_FILE, + RESET_CREATE_FILES, UPDATE_FILE, } from "../actions/file"; import { RECEIVE_DATASET_ROLE } from "../actions/authorization"; @@ -36,6 +38,7 @@ const defaultState: DatasetState = { datasets: [], newDataset: {}, newFile: {}, + newFiles: 
[], roles: {}, }; @@ -47,17 +50,18 @@ const dataset = (state = defaultState, action: DataAction) => { return Object.assign({}, state, { files: state.files.filter((file) => file.id !== action.file.id), }); - // TODO rethink the pattern for file creation - // case CREATE_FILE: - // return Object.assign({}, state, { - // files: [...state.files, action.file] - // }); case CREATE_FILE: return Object.assign({}, state, { newFile: action.file, }); + case CREATE_FILES: + return Object.assign({}, state, { + newFiles: action.files, + }); case RESET_CREATE_FILE: return Object.assign({}, state, { newFile: {} }); + case RESET_CREATE_FILES: + return Object.assign({}, state, { newFiles: [] }); case SET_DATASET_GROUP_ROLE: return Object.assign({}, state, {}); case SET_DATASET_USER_ROLE: