From 8c6ccb6ce25f43f7f6b78e6521750995c9df4d63 Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 5 Oct 2022 14:58:01 -0500 Subject: [PATCH 01/34] changing parameters to dict working on adding submit dataset to extractor --- backend/app/models/extractors.py | 2 +- backend/app/routers/datasets.py | 4 ++-- backend/app/test.py | 8 ++++++++ 3 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 backend/app/test.py diff --git a/backend/app/models/extractors.py b/backend/app/models/extractors.py index f68f24aaf..1976bff36 100644 --- a/backend/app/models/extractors.py +++ b/backend/app/models/extractors.py @@ -24,7 +24,7 @@ class ExtractorIdentifier(MongoModel): default_labels: List[str] = [] process: dict categories: List[str] = [] - parameters: List[dict] = [] + parameters: dict = {} class ExtractorBase(ExtractorIdentifier): diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index c321aad18..5cb3bfdfc 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -7,7 +7,7 @@ import zipfile from collections.abc import Mapping, Iterable from typing import List, Optional, Union - +import json import pika from bson import ObjectId from bson import json_util @@ -706,7 +706,7 @@ async def get_dataset_extract( token = token.lstrip("Bearer") token = token.lstrip(" ") # TODO check of extractor exists - msg = {"message": "testing", "dataseet_id": dataset_id} + msg = {"message": "testing", "dataset_id": dataset_id} body = {} body["secretKey"] = token body["token"] = token diff --git a/backend/app/test.py b/backend/app/test.py new file mode 100644 index 000000000..0a98a4c17 --- /dev/null +++ b/backend/app/test.py @@ -0,0 +1,8 @@ +import time + +if __name__ == "__main__": + with open('testfile.txt') as f: + for i in range(0, 10): + line = 'line number ' + str(i) + '\n' + f.write(line) + print('done') \ No newline at end of file From 21169c7cb5d186474f31609f0d078900b6f737ab Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 5 Oct 2022 15:13:40 -0500 Subject: [PATCH 02/34] not sure if parameters are being sent properly --- backend/app/routers/datasets.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 5cb3bfdfc..2001c42f7 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -720,6 +720,7 @@ async def get_dataset_extract( current_queue = req_info["extractor"] if "parameters" in req_info: current_parameters = req_info["parameters"] + body['parameters'] = current_parameters current_routing_key = "extractors." + current_queue rabbitmq_client.queue_bind( exchange="extractors", From 53826168e6fb20be78af43b26da1664a6940082e Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 12:55:55 -0500 Subject: [PATCH 03/34] handling parameters dict not list dict --- backend/app/routers/files.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index c9114fb69..be70ea4f8 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -285,6 +285,7 @@ async def get_file_extract( current_queue = req_info["extractor"] if "parameters" in req_info: current_parameters = req_info["parameters"] + body['parameters'] = current_parameters current_routing_key = "extractors." 
+ current_queue rabbitmq_client.queue_bind( exchange="extractors", From 2bf9843578aa221bc7b4bc9279fe539b1d1b2f34 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 13:10:03 -0500 Subject: [PATCH 04/34] black formatting --- backend/app/routers/datasets.py | 2 +- backend/app/routers/files.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 2001c42f7..6fe60dad4 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -720,7 +720,7 @@ async def get_dataset_extract( current_queue = req_info["extractor"] if "parameters" in req_info: current_parameters = req_info["parameters"] - body['parameters'] = current_parameters + body["parameters"] = current_parameters current_routing_key = "extractors." + current_queue rabbitmq_client.queue_bind( exchange="extractors", diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index be70ea4f8..d67e41905 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -285,7 +285,7 @@ async def get_file_extract( current_queue = req_info["extractor"] if "parameters" in req_info: current_parameters = req_info["parameters"] - body['parameters'] = current_parameters + body["parameters"] = current_parameters current_routing_key = "extractors." + current_queue rabbitmq_client.queue_bind( exchange="extractors", From e52daef2713cd5c20ab4c6dd2033d3585de9a12f Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 13:27:23 -0500 Subject: [PATCH 05/34] new pipfile lock --- backend/Pipfile.lock | 52 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock index 96b0a1205..278155dd0 100644 --- a/backend/Pipfile.lock +++ b/backend/Pipfile.lock @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:0fe411be03f352389f0e12445f19e0c9e3a77ea03d334c33067239027387e0ba", - "sha256:5b610a217f9aebd1773e6383f506da6df4e962ceaa3f3cf786fdbf669ce7b86f" + "sha256:71f0a67cc45bdd0e6b64121f2eda8462937a2d83266855a55d7d8c106c14d3d6", + "sha256:997e74efab11c34f37ff20e91316a4d7f5d3ae13167e3adec809aac09c7b84aa" ], "index": "pypi", - "version": "==8.2.2" + "version": "==8.2.4" }, "aiohttp": { "hashes": [ @@ -98,11 +98,11 @@ }, "beanie": { "hashes": [ - "sha256:a4e16d2cbb0ba0cd2609e213401a5dfd1bf1ed608451aee9327038d28bef93e2", - "sha256:def2c563068994ec0d09682c1d2340986989572486869409d4117eb5f3dd64d5" + "sha256:0d27c089fc316fbbe1effe09104f0d1b62185fd8cae766b322a05254f9018eb2", + "sha256:740d8479b17bc78b16b9636b274a01878844fc87aef01f8bffa97c967bcecfd9" ], "index": "pypi", - "version": "==1.11.11" + "version": "==1.12.0" }, "bioblend": { "hashes": [ @@ -139,7 +139,7 @@ "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==2.1.1" }, "click": { @@ -162,7 +162,7 @@ "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e", "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f" ], - "markers": "python_version >= '3.6' and python_version < '4'", + "markers": "python_version >= '3.6' and python_version < '4.0'", "version": "==2.2.1" }, "ecdsa": { @@ -183,11 +183,11 @@ }, "elasticsearch": { "hashes": [ - "sha256:8496f5ee4974c127f6d1cee0c48ab185a086bc1c9edba429f158b9a95bb75411", - 
"sha256:e9d61209908e3e26ae9ab4b5d7eb2b2387cf4578d20a1afc8eb649dfc9350efe" + "sha256:14c68a96b7bbbf150dd9fca5ff65da9c50e791c0fdba474a328e43828fdd7f42", + "sha256:d34d43a6c349d15c9d91840f791eeba80fc50ee070caf6695130f56b7f41a02d" ], "index": "pypi", - "version": "==8.4.2" + "version": "==8.4.3" }, "email-validator": { "hashes": [ @@ -315,11 +315,11 @@ }, "minio": { "hashes": [ - "sha256:12ac2d1d4fd3cea159d625847445e1bfceba3fbc2f4ab692c2d2bf716f82246c", - "sha256:1cab424275749b8b5b8bb0c6cc856d667305ef549796ae56f3237fe55306a1fc" + "sha256:63111fedf67e07c5a4c8948b3a4e5ecbb372b522ea562bfa4d484194ec6a2b99", + "sha256:c8ab8646f93d47b9aefbf4db76aaba5ac54c87454b922a3d6c1423aed050aad5" ], "index": "pypi", - "version": "==7.1.11" + "version": "==7.1.12" }, "mongoengine": { "hashes": [ @@ -386,11 +386,11 @@ }, "pipenv": { "hashes": [ - "sha256:22dd3601ab86b2d1caa36a422c4ec6505e2484e71e3b733554c3c3bb8dbe9606", - "sha256:d682375d6a6edd2f1ed2f76085b7191de149ff8381bce6c1aaf7f55061b04457" + "sha256:dc2539c7f4ad10737f6c211493f99e2bbc8161571d71ac29f162dfed86886bb0", + "sha256:fc1982e47e8214f47713efadf61cd61ff643b5988372a83edd040cf0f7d942f2" ], "index": "pypi", - "version": "==2022.9.24" + "version": "==2022.10.4" }, "pipfile": { "hashes": [ @@ -555,11 +555,11 @@ }, "python-keycloak": { "hashes": [ - "sha256:b401d2c67dc1b9e2dbb3309ef2012c2d178584925dc14bd07f6bd2416e5e3ff8", - "sha256:ed1c1935ceaf5d7f928b1b3ab945130f7d54685e4b17da053dbc7bfee0c0271e" + "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96", + "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6" ], "index": "pypi", - "version": "==2.5.0" + "version": "==2.6.0" }, "python-multipart": { "hashes": [ @@ -647,11 +647,11 @@ }, "setuptools": { "hashes": [ - "sha256:a8f6e213b4b0661f590ccf40de95d28a177cd747d098624ad3f69c40287297e9", - "sha256:c2d2709550f15aab6c9110196ea312f468f41cd546bceb24127a1be6fdcaeeb1" + "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012", + "sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e" ], "markers": "python_version >= '3.7'", - "version": "==65.4.0" + "version": "==65.4.1" }, "six": { "hashes": [ @@ -682,7 +682,7 @@ "sha256:357eb7383dee6915f17b00596ec6dd2a890f3117bf52be28a4c516aeee581100", "sha256:e2cdf6e2dad49813e9b5fceb3c7943387309a8738125fbff0b58d248a033f7a9" ], - "markers": "python_version >= '3.6' and python_version < '4'", + "markers": "python_version >= '3.6' and python_version < '4.0'", "version": "==4.7.0" }, "toml": { @@ -856,7 +856,7 @@ "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==2.1.1" }, "click": { @@ -958,7 +958,7 @@ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version < '3.11.0a7'", "version": "==2.0.1" }, "typing-extensions": { From cef2aadc05df6b49edac8354ab971c85b21b3901 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 13:27:35 -0500 Subject: [PATCH 06/34] delete test --- backend/app/test.py | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 backend/app/test.py diff --git a/backend/app/test.py b/backend/app/test.py deleted file mode 100644 index 0a98a4c17..000000000 --- a/backend/app/test.py +++ /dev/null @@ -1,8 +0,0 @@ -import 
time - -if __name__ == "__main__": - with open('testfile.txt') as f: - for i in range(0, 10): - line = 'line number ' + str(i) + '\n' - f.write(line) - print('done') \ No newline at end of file From ececa5a8e97546df81521ef95c9a74213382ba4d Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 13:54:09 -0500 Subject: [PATCH 07/34] does this fix build errors? --- backend/Pipfile.lock | 52 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock index 278155dd0..96b0a1205 100644 --- a/backend/Pipfile.lock +++ b/backend/Pipfile.lock @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:71f0a67cc45bdd0e6b64121f2eda8462937a2d83266855a55d7d8c106c14d3d6", - "sha256:997e74efab11c34f37ff20e91316a4d7f5d3ae13167e3adec809aac09c7b84aa" + "sha256:0fe411be03f352389f0e12445f19e0c9e3a77ea03d334c33067239027387e0ba", + "sha256:5b610a217f9aebd1773e6383f506da6df4e962ceaa3f3cf786fdbf669ce7b86f" ], "index": "pypi", - "version": "==8.2.4" + "version": "==8.2.2" }, "aiohttp": { "hashes": [ @@ -98,11 +98,11 @@ }, "beanie": { "hashes": [ - "sha256:0d27c089fc316fbbe1effe09104f0d1b62185fd8cae766b322a05254f9018eb2", - "sha256:740d8479b17bc78b16b9636b274a01878844fc87aef01f8bffa97c967bcecfd9" + "sha256:a4e16d2cbb0ba0cd2609e213401a5dfd1bf1ed608451aee9327038d28bef93e2", + "sha256:def2c563068994ec0d09682c1d2340986989572486869409d4117eb5f3dd64d5" ], "index": "pypi", - "version": "==1.12.0" + "version": "==1.11.11" }, "bioblend": { "hashes": [ @@ -139,7 +139,7 @@ "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_full_version >= '3.6.0'", + "markers": "python_version >= '3.6'", "version": "==2.1.1" }, "click": { @@ -162,7 +162,7 @@ "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e", "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f" ], - "markers": "python_version >= '3.6' and python_version < '4.0'", + "markers": "python_version >= '3.6' and python_version < '4'", "version": "==2.2.1" }, "ecdsa": { @@ -183,11 +183,11 @@ }, "elasticsearch": { "hashes": [ - "sha256:14c68a96b7bbbf150dd9fca5ff65da9c50e791c0fdba474a328e43828fdd7f42", - "sha256:d34d43a6c349d15c9d91840f791eeba80fc50ee070caf6695130f56b7f41a02d" + "sha256:8496f5ee4974c127f6d1cee0c48ab185a086bc1c9edba429f158b9a95bb75411", + "sha256:e9d61209908e3e26ae9ab4b5d7eb2b2387cf4578d20a1afc8eb649dfc9350efe" ], "index": "pypi", - "version": "==8.4.3" + "version": "==8.4.2" }, "email-validator": { "hashes": [ @@ -315,11 +315,11 @@ }, "minio": { "hashes": [ - "sha256:63111fedf67e07c5a4c8948b3a4e5ecbb372b522ea562bfa4d484194ec6a2b99", - "sha256:c8ab8646f93d47b9aefbf4db76aaba5ac54c87454b922a3d6c1423aed050aad5" + "sha256:12ac2d1d4fd3cea159d625847445e1bfceba3fbc2f4ab692c2d2bf716f82246c", + "sha256:1cab424275749b8b5b8bb0c6cc856d667305ef549796ae56f3237fe55306a1fc" ], "index": "pypi", - "version": "==7.1.12" + "version": "==7.1.11" }, "mongoengine": { "hashes": [ @@ -386,11 +386,11 @@ }, "pipenv": { "hashes": [ - "sha256:dc2539c7f4ad10737f6c211493f99e2bbc8161571d71ac29f162dfed86886bb0", - "sha256:fc1982e47e8214f47713efadf61cd61ff643b5988372a83edd040cf0f7d942f2" + "sha256:22dd3601ab86b2d1caa36a422c4ec6505e2484e71e3b733554c3c3bb8dbe9606", + "sha256:d682375d6a6edd2f1ed2f76085b7191de149ff8381bce6c1aaf7f55061b04457" ], "index": "pypi", - "version": "==2022.10.4" + "version": "==2022.9.24" }, "pipfile": { "hashes": [ 
@@ -555,11 +555,11 @@ }, "python-keycloak": { "hashes": [ - "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96", - "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6" + "sha256:b401d2c67dc1b9e2dbb3309ef2012c2d178584925dc14bd07f6bd2416e5e3ff8", + "sha256:ed1c1935ceaf5d7f928b1b3ab945130f7d54685e4b17da053dbc7bfee0c0271e" ], "index": "pypi", - "version": "==2.6.0" + "version": "==2.5.0" }, "python-multipart": { "hashes": [ @@ -647,11 +647,11 @@ }, "setuptools": { "hashes": [ - "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012", - "sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e" + "sha256:a8f6e213b4b0661f590ccf40de95d28a177cd747d098624ad3f69c40287297e9", + "sha256:c2d2709550f15aab6c9110196ea312f468f41cd546bceb24127a1be6fdcaeeb1" ], "markers": "python_version >= '3.7'", - "version": "==65.4.1" + "version": "==65.4.0" }, "six": { "hashes": [ @@ -682,7 +682,7 @@ "sha256:357eb7383dee6915f17b00596ec6dd2a890f3117bf52be28a4c516aeee581100", "sha256:e2cdf6e2dad49813e9b5fceb3c7943387309a8738125fbff0b58d248a033f7a9" ], - "markers": "python_version >= '3.6' and python_version < '4.0'", + "markers": "python_version >= '3.6' and python_version < '4'", "version": "==4.7.0" }, "toml": { @@ -856,7 +856,7 @@ "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_full_version >= '3.6.0'", + "markers": "python_version >= '3.6'", "version": "==2.1.1" }, "click": { @@ -958,7 +958,7 @@ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], - "markers": "python_full_version < '3.11.0a7'", + "markers": "python_version >= '3.7'", "version": "==2.0.1" }, "typing-extensions": { From a8a4f521deb6df4914772146d27c26f6f0537867 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 14:01:55 -0500 Subject: [PATCH 08/34] new pipfile lock --- backend/Pipfile.lock | 52 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock index 96b0a1205..278155dd0 100644 --- a/backend/Pipfile.lock +++ b/backend/Pipfile.lock @@ -18,11 +18,11 @@ "default": { "aio-pika": { "hashes": [ - "sha256:0fe411be03f352389f0e12445f19e0c9e3a77ea03d334c33067239027387e0ba", - "sha256:5b610a217f9aebd1773e6383f506da6df4e962ceaa3f3cf786fdbf669ce7b86f" + "sha256:71f0a67cc45bdd0e6b64121f2eda8462937a2d83266855a55d7d8c106c14d3d6", + "sha256:997e74efab11c34f37ff20e91316a4d7f5d3ae13167e3adec809aac09c7b84aa" ], "index": "pypi", - "version": "==8.2.2" + "version": "==8.2.4" }, "aiohttp": { "hashes": [ @@ -98,11 +98,11 @@ }, "beanie": { "hashes": [ - "sha256:a4e16d2cbb0ba0cd2609e213401a5dfd1bf1ed608451aee9327038d28bef93e2", - "sha256:def2c563068994ec0d09682c1d2340986989572486869409d4117eb5f3dd64d5" + "sha256:0d27c089fc316fbbe1effe09104f0d1b62185fd8cae766b322a05254f9018eb2", + "sha256:740d8479b17bc78b16b9636b274a01878844fc87aef01f8bffa97c967bcecfd9" ], "index": "pypi", - "version": "==1.11.11" + "version": "==1.12.0" }, "bioblend": { "hashes": [ @@ -139,7 +139,7 @@ "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==2.1.1" }, "click": { @@ -162,7 +162,7 @@ 
"sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e", "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f" ], - "markers": "python_version >= '3.6' and python_version < '4'", + "markers": "python_version >= '3.6' and python_version < '4.0'", "version": "==2.2.1" }, "ecdsa": { @@ -183,11 +183,11 @@ }, "elasticsearch": { "hashes": [ - "sha256:8496f5ee4974c127f6d1cee0c48ab185a086bc1c9edba429f158b9a95bb75411", - "sha256:e9d61209908e3e26ae9ab4b5d7eb2b2387cf4578d20a1afc8eb649dfc9350efe" + "sha256:14c68a96b7bbbf150dd9fca5ff65da9c50e791c0fdba474a328e43828fdd7f42", + "sha256:d34d43a6c349d15c9d91840f791eeba80fc50ee070caf6695130f56b7f41a02d" ], "index": "pypi", - "version": "==8.4.2" + "version": "==8.4.3" }, "email-validator": { "hashes": [ @@ -315,11 +315,11 @@ }, "minio": { "hashes": [ - "sha256:12ac2d1d4fd3cea159d625847445e1bfceba3fbc2f4ab692c2d2bf716f82246c", - "sha256:1cab424275749b8b5b8bb0c6cc856d667305ef549796ae56f3237fe55306a1fc" + "sha256:63111fedf67e07c5a4c8948b3a4e5ecbb372b522ea562bfa4d484194ec6a2b99", + "sha256:c8ab8646f93d47b9aefbf4db76aaba5ac54c87454b922a3d6c1423aed050aad5" ], "index": "pypi", - "version": "==7.1.11" + "version": "==7.1.12" }, "mongoengine": { "hashes": [ @@ -386,11 +386,11 @@ }, "pipenv": { "hashes": [ - "sha256:22dd3601ab86b2d1caa36a422c4ec6505e2484e71e3b733554c3c3bb8dbe9606", - "sha256:d682375d6a6edd2f1ed2f76085b7191de149ff8381bce6c1aaf7f55061b04457" + "sha256:dc2539c7f4ad10737f6c211493f99e2bbc8161571d71ac29f162dfed86886bb0", + "sha256:fc1982e47e8214f47713efadf61cd61ff643b5988372a83edd040cf0f7d942f2" ], "index": "pypi", - "version": "==2022.9.24" + "version": "==2022.10.4" }, "pipfile": { "hashes": [ @@ -555,11 +555,11 @@ }, "python-keycloak": { "hashes": [ - "sha256:b401d2c67dc1b9e2dbb3309ef2012c2d178584925dc14bd07f6bd2416e5e3ff8", - "sha256:ed1c1935ceaf5d7f928b1b3ab945130f7d54685e4b17da053dbc7bfee0c0271e" + "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96", + "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6" ], "index": "pypi", - "version": "==2.5.0" + "version": "==2.6.0" }, "python-multipart": { "hashes": [ @@ -647,11 +647,11 @@ }, "setuptools": { "hashes": [ - "sha256:a8f6e213b4b0661f590ccf40de95d28a177cd747d098624ad3f69c40287297e9", - "sha256:c2d2709550f15aab6c9110196ea312f468f41cd546bceb24127a1be6fdcaeeb1" + "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012", + "sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e" ], "markers": "python_version >= '3.7'", - "version": "==65.4.0" + "version": "==65.4.1" }, "six": { "hashes": [ @@ -682,7 +682,7 @@ "sha256:357eb7383dee6915f17b00596ec6dd2a890f3117bf52be28a4c516aeee581100", "sha256:e2cdf6e2dad49813e9b5fceb3c7943387309a8738125fbff0b58d248a033f7a9" ], - "markers": "python_version >= '3.6' and python_version < '4'", + "markers": "python_version >= '3.6' and python_version < '4.0'", "version": "==4.7.0" }, "toml": { @@ -856,7 +856,7 @@ "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==2.1.1" }, "click": { @@ -958,7 +958,7 @@ "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version < '3.11.0a7'", "version": "==2.0.1" }, 
"typing-extensions": { From c4bd7cae3c36c49ebd6173e44fdf7095c7e44c63 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 6 Oct 2022 16:52:08 -0500 Subject: [PATCH 09/34] new Pipfile.lock --- backend/Pipfile.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock index 278155dd0..547f19e7a 100644 --- a/backend/Pipfile.lock +++ b/backend/Pipfile.lock @@ -170,7 +170,7 @@ "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49", "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.18.0" }, "elastic-transport": { @@ -217,7 +217,7 @@ "hashes": [ "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.18.2" }, "galaxy2cwl": { @@ -543,7 +543,7 @@ "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.2" }, "python-jose": { @@ -658,7 +658,7 @@ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.16.0" }, "sniffio": { @@ -690,7 +690,7 @@ "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.2" }, "tuspy": { From 9dd23aa12939aee5213e3f8a33233f5bb607c4f7 Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 11 Oct 2022 13:09:21 -0500 Subject: [PATCH 10/34] adding tests for extractors also adding a sample extractor_info.json file for registration purposes --- backend/app/tests/extractor_info.json | 30 +++++++++++++++++++ backend/app/tests/test_extractors.py | 42 +++++++++++++++++++++++++++ 2 files changed, 72 insertions(+) create mode 100644 backend/app/tests/extractor_info.json create mode 100644 backend/app/tests/test_extractors.py diff --git a/backend/app/tests/extractor_info.json b/backend/app/tests/extractor_info.json new file mode 100644 index 000000000..6009f5c2b --- /dev/null +++ b/backend/app/tests/extractor_info.json @@ -0,0 +1,30 @@ +{ + "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", + "name": "ncsa.wordcount", + "version": "2.0", + "description": "WordCount extractor. 
Counts the number of characters, words and lines in the text file that was uploaded.", + "author": "Rob Kooper ", + "contributors": [], + "contexts": [ + { + "lines": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#lines", + "words": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#words", + "characters": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#characters" + } + ], + "repository": [ + { + "repType": "git", + "repUrl": "https://opensource.ncsa.illinois.edu/stash/scm/cats/pyclowder.git" + } + ], + "process": { + "file": [ + "text/*", + "application/json" + ] + }, + "external_services": [], + "dependencies": [], + "bibtex": [] +} diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py new file mode 100644 index 000000000..c02d84a06 --- /dev/null +++ b/backend/app/tests/test_extractors.py @@ -0,0 +1,42 @@ +import os +from fastapi.testclient import TestClient +from app.config import settings +from app.models.pyobjectid import PyObjectId + +def test_register(client: TestClient, headers: dict): + with open('extractor_info.json', 'r') as f: + extractor_info = f.read() + response = client.post( + f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers + ) + assert response.json().get("id") is not None + assert response.status_code == 200 + +def test_get_one(client: TestClient, headers: dict): + with open('extractor_info.json', 'r') as f: + extractor_info = f.read() + response = client.post( + f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers + ) + assert response.status_code == 200 + assert response.json().get("id") is not None + extractor_id = response.json().get("id") + response = client.get( + f"{settings.API_V2_STR}/extractors/{extractor_id}", headers=headers + ) + assert response.status_code == 200 + assert response.json().get("id") is not None + +def test_delete(client: TestClient, headers: dict): + with open('extractor_info.json', 'r') as f: + extractor_info = f.read() + response = client.post( + f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers + ) + assert response.status_code == 200 + assert response.json().get("id") is not None + extractor_id = response.json().get("id") + response = client.delete( + f"{settings.API_V2_STR}/extractors/{extractor_id}", headers=headers + ) + assert response.status_code == 200 \ No newline at end of file From edfb976fe7e6234cd48e15cb09d0dc1de6cb7cca Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 11 Oct 2022 13:14:19 -0500 Subject: [PATCH 11/34] does this fix file not found error? 
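Context for this fix: a bare relative path such as 'extractor_info.json' is resolved against the directory pytest is launched from, not against the test module's directory, which is what produced the file-not-found error. A more robust pattern — sketched here only as an illustration, assuming the JSON fixture sits beside the test module — anchors the path on the module itself:

    import os

    # Resolve extractor_info.json relative to this test file so the test
    # passes no matter which directory pytest is invoked from.
    extractor_info_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "extractor_info.json"
    )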
--- backend/app/tests/test_extractors.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py index c02d84a06..00f3487bf 100644 --- a/backend/app/tests/test_extractors.py +++ b/backend/app/tests/test_extractors.py @@ -3,8 +3,11 @@ from app.config import settings from app.models.pyobjectid import PyObjectId +extractor_info_file = os.path.join(os.getcwd(), 'extractor_info.json') + + def test_register(client: TestClient, headers: dict): - with open('extractor_info.json', 'r') as f: + with open(extractor_info_file, 'r') as f: extractor_info = f.read() response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers @@ -13,7 +16,7 @@ def test_register(client: TestClient, headers: dict): assert response.status_code == 200 def test_get_one(client: TestClient, headers: dict): - with open('extractor_info.json', 'r') as f: + with open(extractor_info_file, 'r') as f: extractor_info = f.read() response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers @@ -28,7 +31,7 @@ def test_get_one(client: TestClient, headers: dict): assert response.json().get("id") is not None def test_delete(client: TestClient, headers: dict): - with open('extractor_info.json', 'r') as f: + with open(extractor_info_file, 'r') as f: extractor_info = f.read() response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers From 735de2e720b9999d53db61355f1fa444ee86f560 Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 11 Oct 2022 16:29:53 -0500 Subject: [PATCH 12/34] trying adding it as dict --- backend/app/tests/test_extractors.py | 39 +++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py index 00f3487bf..c46490bdb 100644 --- a/backend/app/tests/test_extractors.py +++ b/backend/app/tests/test_extractors.py @@ -3,12 +3,41 @@ from app.config import settings from app.models.pyobjectid import PyObjectId -extractor_info_file = os.path.join(os.getcwd(), 'extractor_info.json') +extractor_info = { + "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", + "name": "ncsa.wordcount", + "version": "2.0", + "description": "WordCount extractor. 
Counts the number of characters, words and lines in the text file that was uploaded.", + "author": "Rob Kooper ", + "contributors": [], + "contexts": [ + { + "lines": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#lines", + "words": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#words", + "characters": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#characters" + } + ], + "repository": [ + { + "repType": "git", + "repUrl": "https://opensource.ncsa.illinois.edu/stash/scm/cats/pyclowder.git" + } + ], + "process": { + "file": [ + "text/*", + "application/json" + ] + }, + "external_services": [], + "dependencies": [], + "bibtex": [] +} + +# extractor_info_file = os.path.join(os.getcwd(), 'extractor_info.json') def test_register(client: TestClient, headers: dict): - with open(extractor_info_file, 'r') as f: - extractor_info = f.read() response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers ) @@ -16,8 +45,6 @@ def test_register(client: TestClient, headers: dict): assert response.status_code == 200 def test_get_one(client: TestClient, headers: dict): - with open(extractor_info_file, 'r') as f: - extractor_info = f.read() response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers ) @@ -31,8 +58,6 @@ def test_get_one(client: TestClient, headers: dict): assert response.json().get("id") is not None def test_delete(client: TestClient, headers: dict): - with open(extractor_info_file, 'r') as f: - extractor_info = f.read() response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers ) From c5ea19101c0b642bcb7ab9bac184fe068a64b3d8 Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 11 Oct 2022 16:36:34 -0500 Subject: [PATCH 13/34] adding user - will this fix the tests? --- backend/app/tests/test_extractors.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py index c46490bdb..4343c34ef 100644 --- a/backend/app/tests/test_extractors.py +++ b/backend/app/tests/test_extractors.py @@ -3,6 +3,13 @@ from app.config import settings from app.models.pyobjectid import PyObjectId +user = { + "email": "test@test.org", + "password": "not_a_password", + "first_name": "Foo", + "last_name": "Bar", +} + extractor_info = { "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", "name": "ncsa.wordcount", From 12db5ad7df9dc486f8f3b20dff25120d9492a918 Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 11 Oct 2022 16:40:40 -0500 Subject: [PATCH 14/34] black formatting --- backend/app/tests/test_extractors.py | 55 +++++++++++++--------------- 1 file changed, 26 insertions(+), 29 deletions(-) diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py index 4343c34ef..00428403a 100644 --- a/backend/app/tests/test_extractors.py +++ b/backend/app/tests/test_extractors.py @@ -11,34 +11,29 @@ } extractor_info = { - "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", - "name": "ncsa.wordcount", - "version": "2.0", - "description": "WordCount extractor. 
Counts the number of characters, words and lines in the text file that was uploaded.", - "author": "Rob Kooper ", - "contributors": [], - "contexts": [ - { - "lines": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#lines", - "words": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#words", - "characters": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#characters" - } - ], - "repository": [ - { - "repType": "git", - "repUrl": "https://opensource.ncsa.illinois.edu/stash/scm/cats/pyclowder.git" - } - ], - "process": { - "file": [ - "text/*", - "application/json" - ] - }, - "external_services": [], - "dependencies": [], - "bibtex": [] + "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", + "name": "ncsa.wordcount", + "version": "2.0", + "description": "WordCount extractor. Counts the number of characters, words and lines in the text file that was uploaded.", + "author": "Rob Kooper ", + "contributors": [], + "contexts": [ + { + "lines": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#lines", + "words": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#words", + "characters": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#characters", + } + ], + "repository": [ + { + "repType": "git", + "repUrl": "https://opensource.ncsa.illinois.edu/stash/scm/cats/pyclowder.git", + } + ], + "process": {"file": ["text/*", "application/json"]}, + "external_services": [], + "dependencies": [], + "bibtex": [], } # extractor_info_file = os.path.join(os.getcwd(), 'extractor_info.json') @@ -51,6 +46,7 @@ def test_register(client: TestClient, headers: dict): assert response.json().get("id") is not None assert response.status_code == 200 + def test_get_one(client: TestClient, headers: dict): response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers @@ -64,6 +60,7 @@ def test_get_one(client: TestClient, headers: dict): assert response.status_code == 200 assert response.json().get("id") is not None + def test_delete(client: TestClient, headers: dict): response = client.post( f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers @@ -74,4 +71,4 @@ def test_delete(client: TestClient, headers: dict): response = client.delete( f"{settings.API_V2_STR}/extractors/{extractor_id}", headers=headers ) - assert response.status_code == 200 \ No newline at end of file + assert response.status_code == 200 From 1a5db772b96cd4910da6c5f11022ee2e7afe609e Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 12 Oct 2022 09:09:10 -0500 Subject: [PATCH 15/34] comments in methods --- backend/app/routers/datasets.py | 4 +++- backend/app/routers/files.py | 7 ++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 6fe60dad4..475b40ffc 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -686,7 +686,8 @@ async def download_dataset( else: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") - +# submits file to extractor +# can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( dataset_id: str, @@ -710,6 +711,7 @@ async def get_dataset_extract( body = {} body["secretKey"] = token body["token"] = token + # TODO better solution for host body["host"] = "http://127.0.0.1:8000" body["retry_count"] = 0 body["filename"] = dataset["name"] diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py 
index d67e41905..03c0c1d7a 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -249,7 +249,8 @@ async def get_file_versions( raise HTTPException(status_code=404, detail=f"File {file_id} not found") - +# submits file to extractor +# can handle parameeters pass in as key/values in info @router.post("/{file_id}/extract") async def get_file_extract( file_id: str, @@ -270,14 +271,14 @@ async def get_file_extract( # TODO check if extractor is registered msg = {"message": "testing", "file_id": file_id} body = {} + # TODO better solution for host body["host"] = "http://127.0.0.1:8000" - body["secretKey"] = "secretKey" + body["secretKey"] = token body["token"] = token body["retry_count"] = 0 body["filename"] = file["name"] body["id"] = file_id body["datasetId"] = str(file["dataset_id"]) - body["host"] = "http://127.0.0.1:8000" body["secretKey"] = token body["fileSize"] = file["bytes"] body["resource_type"] = "file" From 0dcb64cb49c5f8b1ee30a996d0e9e0b15f69661d Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 12 Oct 2022 09:09:49 -0500 Subject: [PATCH 16/34] black formatting --- backend/app/routers/datasets.py | 1 + backend/app/routers/files.py | 1 + 2 files changed, 2 insertions(+) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 475b40ffc..abdcde93e 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -686,6 +686,7 @@ async def download_dataset( else: raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") + # submits file to extractor # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 03c0c1d7a..3209b1b5f 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -249,6 +249,7 @@ async def get_file_versions( raise HTTPException(status_code=404, detail=f"File {file_id} not found") + # submits file to extractor # can handle parameeters pass in as key/values in info @router.post("/{file_id}/extract") From c2fc039f40b6da6761fea27ed2d628a612d3c729 Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 12 Oct 2022 12:25:33 -0500 Subject: [PATCH 17/34] new classes --- backend/app/models/feeds.py | 0 backend/app/models/listeners.py | 0 backend/app/models/search.py | 0 backend/app/rabbitmq/listeners.py | 0 backend/app/routers/feeds.py | 0 backend/app/routers/listeners.py | 0 6 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 backend/app/models/feeds.py create mode 100644 backend/app/models/listeners.py create mode 100644 backend/app/models/search.py create mode 100644 backend/app/rabbitmq/listeners.py create mode 100644 backend/app/routers/feeds.py create mode 100644 backend/app/routers/listeners.py diff --git a/backend/app/models/feeds.py b/backend/app/models/feeds.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/models/search.py b/backend/app/models/search.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/rabbitmq/listeners.py b/backend/app/rabbitmq/listeners.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py new file mode 100644 index 000000000..e69de29bb From 
8d67f08637c3250dbc9dd3571d08938ba4102eba Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 12 Oct 2022 12:27:32 -0500 Subject: [PATCH 18/34] matching listener feed branch --- backend/app/models/feeds.py | 30 +++++++ backend/app/models/listeners.py | 72 +++++++++++++++ backend/app/models/search.py | 25 ++++++ backend/app/rabbitmq/listeners.py | 45 ++++++++++ backend/app/routers/feeds.py | 142 ++++++++++++++++++++++++++++++ backend/app/routers/listeners.py | 142 ++++++++++++++++++++++++++++++ 6 files changed, 456 insertions(+) diff --git a/backend/app/models/feeds.py b/backend/app/models/feeds.py index e69de29bb..a262ca399 100644 --- a/backend/app/models/feeds.py +++ b/backend/app/models/feeds.py @@ -0,0 +1,30 @@ +from datetime import datetime +from pydantic import Field, BaseModel +from typing import Optional, List, Union +from app.models.mongomodel import MongoModel +from app.models.users import UserOut +from app.models.search import SearchObject +from app.models.listeners import ListenerOut, FeedListener + + +class JobFeed(BaseModel): + name: str + search: SearchObject + listeners: List[FeedListener] = [] + + +class FeedBase(JobFeed): + description: str = "" + + +class FeedIn(JobFeed): + pass + + +class FeedDB(JobFeed, MongoModel): + author: UserOut + updated: datetime = Field(default_factory=datetime.utcnow) + + +class FeedOut(FeedDB): + pass diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index e69de29bb..87b3fb146 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -0,0 +1,72 @@ +from datetime import datetime +from pydantic import Field, BaseModel +from typing import Optional, List, Union +from app.models.pyobjectid import PyObjectId +from app.models.mongomodel import MongoModel +from app.models.users import UserOut + + +class Repository(MongoModel): + repository_type: str = "git" + repository_url: str = "" + + +# Currently for extractor_info JSON from Clowder v1 extractors POSTing to /api/extractors +class ExtractorInfo(BaseModel): + author: str # Referring to author of listener script (e.g. name or email), not Clowder user + process: dict + maturity: str = "Development" + contributors: List[str] = [] + contexts: List[dict] = [] + repository: List[Repository] = [] + external_services: List[str] = [] + libraries: List[str] = [] + bibtex: List[str] = [] + default_labels: List[str] = [] + categories: List[str] = [] + parameters: List[dict] = [] + + +class ListenerBase(BaseModel): + name: str + version: int = 1 + description: str = "" + + +class ListenerIn(ListenerBase): + pass + + +class LegacyListenerIn(ExtractorInfo): + name: str + version: str = "1.0" + description: str = "" + + +class ListenerDB(ListenerBase, MongoModel): + author: UserOut + created: datetime = Field(default_factory=datetime.utcnow) + modified: datetime = Field(default_factory=datetime.utcnow) + properties: Optional[ExtractorInfo] = None + + +class ListenerOut(ListenerDB): + pass + + +class FeedListener(BaseModel): + listener_id: PyObjectId + automatic: bool # Listeners can trigger automatically or not on a per-feed basis. 
+ + +class ListenerMessage(BaseModel): + host: str = "http://127.0.0.1:8000" + secretKey: str = "secretKey" + retry_count: int = 0 + resource_type: str = "file" + flags: str = "" + filename: str + fileSize: int + id: str + datasetId: str + token: str diff --git a/backend/app/models/search.py b/backend/app/models/search.py index e69de29bb..ac3dbba88 100644 --- a/backend/app/models/search.py +++ b/backend/app/models/search.py @@ -0,0 +1,25 @@ +from datetime import datetime +from pydantic import BaseModel +from typing import Optional, List + + +# This describes what is indexed for a given resource - may eventually be split by index (resource type) +class SearchIndexContents(BaseModel): + id: str + name: str + creator: str # currently just email + created: datetime + download: int + + +class SearchCriteria(BaseModel): + field: str + operator: str = "==" + value: str + + +class SearchObject(BaseModel): + index_name: str + criteria: List[SearchCriteria] = [] + mode: str = "and" # and / or + original: Optional[str] # original un-parsed search string diff --git a/backend/app/rabbitmq/listeners.py b/backend/app/rabbitmq/listeners.py index e69de29bb..1cad3574e 100644 --- a/backend/app/rabbitmq/listeners.py +++ b/backend/app/rabbitmq/listeners.py @@ -0,0 +1,45 @@ +import json +import pika +from fastapi import Request, HTTPException, Depends +from pymongo import MongoClient +from bson import ObjectId +from pika.adapters.blocking_connection import BlockingChannel + +from app.keycloak_auth import get_token +from app import dependencies +from app.models.files import FileOut +from app.models.listeners import ListenerMessage + + +def submit_file_message( + file_out: FileOut, + queue: str, + routing_key: str, + parameters: dict, + token: str = Depends(get_token), + db: MongoClient = Depends(dependencies.get_db), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), +): + # TODO check if extractor is registered + msg_body = ListenerMessage( + filename=file_out.name, + fileSize=file_out.bytes, + id=file_out.id, + datasetId=file_out.dataset_id, + secretKey=token, + ) + + rabbitmq_client.queue_bind( + exchange="extractors", + queue=queue, + routing_key=routing_key, + ) + rabbitmq_client.basic_publish( + exchange="extractors", + routing_key=routing_key, + body=json.dumps(msg_body.dict(), ensure_ascii=False), + properties=pika.BasicProperties( + content_type="application/json", delivery_mode=1 + ), + ) + return {"message": "testing", "file_id": file_out.id} diff --git a/backend/app/routers/feeds.py b/backend/app/routers/feeds.py index e69de29bb..d89755a27 100644 --- a/backend/app/routers/feeds.py +++ b/backend/app/routers/feeds.py @@ -0,0 +1,142 @@ +from typing import List +import os +from bson import ObjectId +from fastapi import APIRouter, HTTPException, Depends, Request +from pymongo import MongoClient +import datetime +from app.dependencies import get_db +from app.keycloak_auth import get_user, get_current_user +from app.models.users import UserOut +from app.models.files import FileOut +from app.models.listeners import ( + FeedListener, + ListenerOut, +) +from app.models.feeds import ( + FeedIn, + FeedDB, + FeedOut, +) +from app.models.search import SearchIndexContents +from app.elastic_search.connect import check_search_result +from app.rabbitmq.listeners import submit_file_message + +router = APIRouter() + +clowder_bucket = os.getenv("MINIO_BUCKET_NAME", "clowder") + + +# TODO: Move this to MongoDB middle layer +async def disassociate_listener_db(feed_id: str, listener_id: str, db: 
MongoClient): + """Remove a specific listener_id from the listeners associated with a feed.""" + async for feed in db["feeds"].find( + {"listeners.listener_id": ObjectId(listener_id)} + ): + feed_db = FeedDB.from_mongo(feed) + new_listeners = [] + for feed_listener in feed_db.listeners: + if feed_listener.listener_id != listener_id: + new_listeners.append(feed_listener) + feed_db.listeners = new_listeners + await db["feeds"].replace_one( + {"_id": ObjectId(feed_id)}, FeedDB(**feed_db).to_mongo() + ) + + +async def check_feed_listeners( + es_client, + file_out: FileOut, + user: UserOut, + db: MongoClient, +): + """Automatically submit new file to listeners on feeds that fit the search criteria.""" + listeners_found = [] + async for feed in db["feeds"].find({"listeners": {"$ne": []}}): + feed_db = FeedDB(**feed) + + # If feed doesn't have any auto-triggering listeners, we're done + found_auto = False + for listener in feed_db.listeners: + if listener.automatic: + found_auto = True + break + + if found_auto: + # Verify whether resource_id is found when searching the specified criteria + feed_match = check_search_result(es_client, file_out, feed_db.search) + if feed_match: + for listener in feed_db.listeners: + if listener.automatic: + listeners_found.append(listener.listener_id) + + for targ_listener in listeners_found: + queue = "" # TODO: Each extractor gets a queue - routing key same as name? + routing_key = "" + parameters = {} + submit_file_message(file_out, queue, routing_key, parameters) + + return listeners_found + + +@router.post("", response_model=FeedOut) +async def save_feed( + feed_in: FeedIn, + user=Depends(get_current_user), + db: MongoClient = Depends(get_db), +): + feed = FeedDB(**feed_in.dict(), author=user) + new_feed = await db["feeds"].insert_one(feed.to_mongo()) + found = await db["feeds"].find_one({"_id": new_feed.inserted_id}) + feed_out = FeedOut.from_mongo(found) + return feed_out + + +@router.delete("/{feed_id}") +async def delete_feed( + feed_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(get_db), +): + if (await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: + await db["feeds"].delete_one({"_id": ObjectId(feed_id)}) + return {"deleted": feed_id} + else: + raise HTTPException(status_code=404, detail=f"Feed {feed_id} not found") + + +@router.post("/{feed_id}/listeners", response_model=FeedOut) +async def associate_listener( + feed_id: str, + listener: FeedListener, + user=Depends(get_current_user), + db: MongoClient = Depends(get_db), +): + if (feed := await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: + feed_out = FeedOut.from_mongo(feed) + if ( + listener_q := await db["listeners"].find_one( + {"_id": ObjectId(listener.listener_id)} + ) + ) is not None: + feed_out.listeners.append(listener) + await db["feeds"].replace_one( + {"_id": ObjectId(feed_id)}, FeedDB(**feed_out.dict()).to_mongo() + ) + return feed_out + raise HTTPException( + status_code=404, detail=f"listener {listener.listener_id} not found" + ) + raise HTTPException(status_code=404, detail=f"feed {feed_id} not found") + + +@router.delete("/{feed_id}/listeners/{listener_id}", response_model=FeedOut) +async def disassociate_listener( + feed_id: str, + listener_id: str, + user=Depends(get_current_user), + db: MongoClient = Depends(get_db), +): + if (feed := await db["feeds"].find_one({"_id": ObjectId(feed_id)})) is not None: + disassociate_listener_db(feed_id, listener_id, db) + return {"disassociated": listener_id} + raise 
HTTPException(status_code=404, detail=f"feed {feed_id} not found") diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py index e69de29bb..2c74e71c4 100644 --- a/backend/app/routers/listeners.py +++ b/backend/app/routers/listeners.py @@ -0,0 +1,142 @@ +from typing import List +import os +from bson import ObjectId +from fastapi import APIRouter, HTTPException, Depends, Request +from pymongo import MongoClient +import datetime +from app.dependencies import get_db +from app.keycloak_auth import get_user, get_current_user +from app.models.listeners import ( + ExtractorInfo, + ListenerIn, + LegacyListenerIn, + ListenerDB, + ListenerOut, +) +from app.models.feeds import FeedOut +from app.routers.feeds import disassociate_listener_db + +router = APIRouter() +legacy_router = APIRouter() # for back-compatibilty with v1 extractors + +clowder_bucket = os.getenv("MINIO_BUCKET_NAME", "clowder") + + +@router.post("", response_model=ListenerOut) +async def save_listener( + listener_in: ListenerIn, + user=Depends(get_current_user), + db: MongoClient = Depends(get_db), +): + listener = ListenerDB(**listener_in.dict(), author=user) + new_listener = await db["listeners"].insert_one(listener.to_mongo()) + found = await db["listeners"].find_one({"_id": new_listener.inserted_id}) + listener_out = ListenerOut.from_mongo(found) + return listener_out + + +@legacy_router.post("", response_model=ListenerOut) +async def save_legacy_listener( + legacy_in: LegacyListenerIn, + user=Depends(get_user), + db: MongoClient = Depends(get_db), +): + """This will take a POST with Clowder v1 extractor_info dict info, and convert to a v2 Listener.""" + listener_properties = ExtractorInfo(**legacy_in.dict) + listener = ListenerDB( + name=legacy_in.name, + version=int(legacy_in.version), + description=legacy_in.description, + author=user, + properties=listener_properties, + ) + new_listener = await db["listeners"].insert_one(listener.to_mongo()) + found = await db["listeners"].find_one({"_id": new_listener.inserted_id}) + listener_out = ListenerOut.from_mongo(found) + + # TODO: Automatically match or create a Feed based on listener_in.process rules + for process_key in listener_properties.process: + if process_key == "file": + mimetypes = listener_properties.process[process_key] + new_feed = { + "name": legacy_in.name + " " + legacy_in.version, + "mode": "or", + "listeners": [{"listener_id": listener_out.id, "automatic": True}], + "criteria": [], + } + for mimetype in mimetypes: + new_feed["criteria"].append( + {"field": "MIMEtype", "operator": "==", "value": mimetype} + ) + + # Save feed + pass + + return listener_out + + +@router.get("/{listener_id}", response_model=ListenerOut) +async def get_listener(listener_id: str, db: MongoClient = Depends(get_db)): + if ( + listener := await db["listeners"].find_one({"_id": ObjectId(listener_id)}) + ) is not None: + return ListenerOut.from_mongo(listener) + raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") + + +@router.get("", response_model=List[ListenerOut]) +async def get_listeners( + user_id=Depends(get_user), + db: MongoClient = Depends(get_db), + skip: int = 0, + limit: int = 2, +): + listeners = [] + for doc in ( + await db["listeners"].find().skip(skip).limit(limit).to_list(length=limit) + ): + listeners.append(ListenerOut.from_mongo(doc)) + return listeners + + +@router.put("/{listener_id}", response_model=ListenerOut) +async def edit_listener( + listener_id: str, + listener_in: ListenerIn, + db: MongoClient = 
Depends(get_db), + user_id=Depends(get_user), +): + if ( + listener := await db["listeners"].find_one({"_id": ObjectId(listener_id)}) + ) is not None: + # TODO: Refactor this with permissions checks etc. + listener_update = dict(listener_in) if listener_in is not None else {} + user = await db["users"].find_one({"_id": ObjectId(user_id)}) + listener_update["updated"] = datetime.datetime.utcnow() + try: + listener.update(listener_update) + await db["listeners"].replace_one( + {"_id": ObjectId(listener_id)}, ListenerDB(**listener).to_mongo() + ) + except Exception as e: + raise HTTPException(status_code=500, detail=e.args[0]) + return ListenerOut.from_mongo(listener) + raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") + + +@router.delete("/{listener_id}") +async def delete_listener( + listener_id: str, + db: MongoClient = Depends(get_db), +): + if (await db["listeners"].find_one({"_id": ObjectId(listener_id)})) is not None: + # unsubscribe the listener from any feeds + async for feed in db["feeds"].find( + {"listeners.listener_id": ObjectId(listener_id)} + ): + feed_out = FeedOut.from_mongo(feed) + disassociate_listener_db(feed_out.id, listener_id, db) + await db["listeners"].delete_one({"_id": ObjectId(listener_id)}) + return {"deleted": listener_id} + else: + raise HTTPException(status_code=404, detail=f"listener {listener_id} not found") From f45d0d11c572be1bb7bc46ba0df6707aa0d0b444 Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 12 Oct 2022 12:29:38 -0500 Subject: [PATCH 19/34] more fields necessary for listener to match extractors from v1 --- backend/app/models/listeners.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 87b3fb146..1576fd408 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -13,18 +13,21 @@ class Repository(MongoModel): # Currently for extractor_info JSON from Clowder v1 extractors POSTing to /api/extractors class ExtractorInfo(BaseModel): - author: str # Referring to author of listener script (e.g. 
name or email), not Clowder user - process: dict - maturity: str = "Development" + name: str + version: str = "1.0" + updated: datetime = Field(default_factory=datetime.utcnow) + author: str contributors: List[str] = [] contexts: List[dict] = [] - repository: List[Repository] = [] - external_services: List[str] = [] + repository: Union[list[Repository], None] = None + external_services: List[str] libraries: List[str] = [] - bibtex: List[str] = [] + bibtex: List[str] + maturity: str = "Development" default_labels: List[str] = [] + process: dict categories: List[str] = [] - parameters: List[dict] = [] + parameters: dict = {} class ListenerBase(BaseModel): From 4847a4fcb999f613d55b2dd19a1c70500d66b112 Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 19 Oct 2022 09:48:31 -0500 Subject: [PATCH 20/34] import pymongo --- backend/app/routers/datasets.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index d281c175a..654864016 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -22,6 +22,7 @@ ) from minio import Minio from pika.adapters.blocking_connection import BlockingChannel +import pymongo from pymongo import MongoClient from rocrate.model.person import Person from rocrate.rocrate import ROCrate From 2b000671389ef1dfb0a51ce5d8774f26812053f8 Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 25 Oct 2022 15:55:05 -0500 Subject: [PATCH 21/34] updating and adding models from codegen beginning of adding listener/extractor to front end --- frontend/src/openapi/v2/index.ts | 4 ++ .../src/openapi/v2/models/ExtractorBase.ts | 25 ++++++++ .../openapi/v2/models/ExtractorIdentifier.ts | 17 +++++- frontend/src/openapi/v2/models/ExtractorIn.ts | 17 +++++- .../src/openapi/v2/models/ExtractorOut.ts | 17 +++++- .../openapi/v2/models/MetadataDefinitionIn.ts | 59 ++++++++++-------- .../v2/models/MetadataDefinitionOut.ts | 61 +++++++++++-------- frontend/src/openapi/v2/models/Repository.ts | 9 +++ .../openapi/v2/services/DatasetsService.ts | 22 ++++++- 9 files changed, 177 insertions(+), 54 deletions(-) create mode 100644 frontend/src/openapi/v2/models/ExtractorBase.ts create mode 100644 frontend/src/openapi/v2/models/Repository.ts diff --git a/frontend/src/openapi/v2/index.ts b/frontend/src/openapi/v2/index.ts index 13cc6a85c..010c00aba 100644 --- a/frontend/src/openapi/v2/index.ts +++ b/frontend/src/openapi/v2/index.ts @@ -15,6 +15,7 @@ export type { DatasetBase } from './models/DatasetBase'; export type { DatasetIn } from './models/DatasetIn'; export type { DatasetOut } from './models/DatasetOut'; export type { DatasetPatch } from './models/DatasetPatch'; +export type { ExtractorBase } from './models/ExtractorBase'; export type { ExtractorIdentifier } from './models/ExtractorIdentifier'; export type { ExtractorIn } from './models/ExtractorIn'; export type { ExtractorOut } from './models/ExtractorOut'; @@ -34,6 +35,7 @@ export type { MetadataIn } from './models/MetadataIn'; export type { MetadataOut } from './models/MetadataOut'; export type { MetadataPatch } from './models/MetadataPatch'; export type { MongoDBRef } from './models/MongoDBRef'; +export type { Repository } from './models/Repository'; export type { UserIn } from './models/UserIn'; export type { UserOut } from './models/UserOut'; export type { ValidationError } from './models/ValidationError'; @@ -41,6 +43,8 @@ export type { ValidationError } from './models/ValidationError'; export { AuthService } from './services/AuthService'; export { 
CollectionsService } from './services/CollectionsService'; export { DatasetsService } from './services/DatasetsService'; +export { ElasticsearchService } from './services/ElasticsearchService'; +export { ExtractorsService } from './services/ExtractorsService'; export { FilesService } from './services/FilesService'; export { FoldersService } from './services/FoldersService'; export { LoginService } from './services/LoginService'; diff --git a/frontend/src/openapi/v2/models/ExtractorBase.ts b/frontend/src/openapi/v2/models/ExtractorBase.ts new file mode 100644 index 000000000..3ad175003 --- /dev/null +++ b/frontend/src/openapi/v2/models/ExtractorBase.ts @@ -0,0 +1,25 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Repository } from './Repository'; + +export type ExtractorBase = { + id?: string; + name: string; + version?: string; + updated?: string; + author: string; + contributors?: Array; + contexts?: Array; + repository?: Array; + external_services: Array; + libraries?: Array; + bibtex: Array; + maturity?: string; + default_labels?: Array; + process: any; + categories?: Array; + parameters?: any; + description?: string; +} diff --git a/frontend/src/openapi/v2/models/ExtractorIdentifier.ts b/frontend/src/openapi/v2/models/ExtractorIdentifier.ts index 04a191142..97350a861 100644 --- a/frontend/src/openapi/v2/models/ExtractorIdentifier.ts +++ b/frontend/src/openapi/v2/models/ExtractorIdentifier.ts @@ -2,8 +2,23 @@ /* tslint:disable */ /* eslint-disable */ +import type { Repository } from './Repository'; + export type ExtractorIdentifier = { id?: string; name: string; - version?: number; + version?: string; + updated?: string; + author: string; + contributors?: Array; + contexts?: Array; + repository?: Array; + external_services: Array; + libraries?: Array; + bibtex: Array; + maturity?: string; + default_labels?: Array; + process: any; + categories?: Array; + parameters?: any; } diff --git a/frontend/src/openapi/v2/models/ExtractorIn.ts b/frontend/src/openapi/v2/models/ExtractorIn.ts index 4b050828d..658eace34 100644 --- a/frontend/src/openapi/v2/models/ExtractorIn.ts +++ b/frontend/src/openapi/v2/models/ExtractorIn.ts @@ -2,9 +2,24 @@ /* tslint:disable */ /* eslint-disable */ +import type { Repository } from './Repository'; + export type ExtractorIn = { id?: string; name: string; - version?: number; + version?: string; + updated?: string; + author: string; + contributors?: Array; + contexts?: Array; + repository?: Array; + external_services: Array; + libraries?: Array; + bibtex: Array; + maturity?: string; + default_labels?: Array; + process: any; + categories?: Array; + parameters?: any; description?: string; } diff --git a/frontend/src/openapi/v2/models/ExtractorOut.ts b/frontend/src/openapi/v2/models/ExtractorOut.ts index 5fa12380e..99545fb0e 100644 --- a/frontend/src/openapi/v2/models/ExtractorOut.ts +++ b/frontend/src/openapi/v2/models/ExtractorOut.ts @@ -2,9 +2,24 @@ /* tslint:disable */ /* eslint-disable */ +import type { Repository } from './Repository'; + export type ExtractorOut = { id?: string; name: string; - version?: number; + version?: string; + updated?: string; + author: string; + contributors?: Array; + contexts?: Array; + repository?: Array; + external_services: Array; + libraries?: Array; + bibtex: Array; + maturity?: string; + default_labels?: Array; + process: any; + categories?: Array; + parameters?: any; description?: string; } diff --git a/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts 
b/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts index b2348f0e6..fbf785cf5 100644 --- a/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts +++ b/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts @@ -9,28 +9,39 @@ import type { MetadataField } from './MetadataField'; * These provide a shorthand for use by extractors as well as a source for building GUI widgets to add new entries. * * Example: { - * "name": "LatLon", - * "description": "A set of Latitude/Longitude coordinates", - * "context": { - * "longitude": "https://schema.org/longitude", - * "latitude": "https://schema.org/latitude" + * "name" : "LatLon", + * "description" : "A set of Latitude/Longitude coordinates", + * "context" : { + * "longitude" : "https://schema.org/longitude", + * "latitude" : "https://schema.org/latitude" * }, - * "fields": [{ - * "name": "longitude", - * "type": "float", - * "required": "True" - * },{ - * "name": "latitude", - * "type": "float", - * "required": "True" - * }] - * } - */ - export type MetadataDefinitionIn = { - id?: string; - name: string; - description?: string; - context?: any; - context_url?: string; - fields: Array; - } + * "fields" : [ + * { + * "name" : "longitude", + * "list" : false, + * "widgetType": "TextField", + * "config": { + * "type" : "float" + * }, + * "required" : true + * }, + * { + * "name" : "latitude", + * "list" : false, + * "widgetType": "TextField", + * "config": { + * "type" : "float" + * }, + * "required" : true + * } + * ] + * } + */ + export type MetadataDefinitionIn = { + id?: string; + name: string; + description?: string; + context?: any; + context_url?: string; + fields: Array; + } diff --git a/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts b/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts index d41409ade..f6801840e 100644 --- a/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts +++ b/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts @@ -10,29 +10,40 @@ import type { UserOut } from './UserOut'; * These provide a shorthand for use by extractors as well as a source for building GUI widgets to add new entries. 
* * Example: { - * "name": "LatLon", - * "description": "A set of Latitude/Longitude coordinates", - * "context": { - * "longitude": "https://schema.org/longitude", - * "latitude": "https://schema.org/latitude" + * "name" : "LatLon", + * "description" : "A set of Latitude/Longitude coordinates", + * "context" : { + * "longitude" : "https://schema.org/longitude", + * "latitude" : "https://schema.org/latitude" * }, - * "fields": [{ - * "name": "longitude", - * "type": "float", - * "required": "True" - * },{ - * "name": "latitude", - * "type": "float", - * "required": "True" - * }] - * } - */ - export type MetadataDefinitionOut = { - id?: string; - name: string; - description?: string; - context?: any; - context_url?: string; - fields: Array; - creator: UserOut; - } + * "fields" : [ + * { + * "name" : "longitude", + * "list" : false, + * "widgetType": "TextField", + * "config": { + * "type" : "float" + * }, + * "required" : true + * }, + * { + * "name" : "latitude", + * "list" : false, + * "widgetType": "TextField", + * "config": { + * "type" : "float" + * }, + * "required" : true + * } + * ] + * } + */ + export type MetadataDefinitionOut = { + id?: string; + name: string; + description?: string; + context?: any; + context_url?: string; + fields: Array; + creator: UserOut; + } diff --git a/frontend/src/openapi/v2/models/Repository.ts b/frontend/src/openapi/v2/models/Repository.ts new file mode 100644 index 000000000..faa80042c --- /dev/null +++ b/frontend/src/openapi/v2/models/Repository.ts @@ -0,0 +1,9 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +export type Repository = { + id?: string; + repository_type?: string; + repository_url?: string; +} diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 711fcab0e..8c26b66dd 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -174,15 +174,15 @@ export class DatasetsService { /** * Save File * @param datasetId - * @param folderId * @param formData + * @param folderId * @returns FileOut Successful Response * @throws ApiError */ public static saveFileApiV2DatasetsDatasetIdFilesPost( datasetId: string, - folderId: string, formData: Body_save_file_api_v2_datasets__dataset_id__files_post, + folderId?: string, ): CancelablePromise { return __request({ method: 'POST', @@ -301,4 +301,22 @@ export class DatasetsService { }); } + /** + * Get Dataset Extract + * @param datasetId + * @returns any Successful Response + * @throws ApiError + */ + public static getDatasetExtractApiV2DatasetsDatasetIdExtractPost( + datasetId: string, + ): CancelablePromise { + return __request({ + method: 'POST', + path: `/api/v2/datasets/${datasetId}/extract`, + errors: { + 422: `Validation Error`, + }, + }); + } + } \ No newline at end of file From 9f963d474d775d608c61f372b6906d945a048a54 Mon Sep 17 00:00:00 2001 From: toddn Date: Tue, 25 Oct 2022 15:57:39 -0500 Subject: [PATCH 22/34] services added by codegen --- .../v2/services/ElasticsearchService.ts | 57 +++++++++ .../openapi/v2/services/ExtractorsService.ts | 114 ++++++++++++++++++ 2 files changed, 171 insertions(+) create mode 100644 frontend/src/openapi/v2/services/ElasticsearchService.ts create mode 100644 frontend/src/openapi/v2/services/ExtractorsService.ts diff --git a/frontend/src/openapi/v2/services/ElasticsearchService.ts b/frontend/src/openapi/v2/services/ElasticsearchService.ts new file mode 100644 index 000000000..5c09cb9bc --- /dev/null 
+++ b/frontend/src/openapi/v2/services/ElasticsearchService.ts @@ -0,0 +1,57 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { CancelablePromise } from '../core/CancelablePromise'; +import { request as __request } from '../core/request'; + +export class ElasticsearchService { + + /** + * Search + * @param indexName + * @param query + * @returns string Successful Response + * @throws ApiError + */ + public static searchApiV2ElasticsearchSearchPut( + indexName: string, + query: string, + ): CancelablePromise { + return __request({ + method: 'PUT', + path: `/api/v2/elasticsearch/search`, + query: { + 'index_name': indexName, + 'query': query, + }, + errors: { + 422: `Validation Error`, + }, + }); + } + + /** + * Search File + * @returns any Successful Response + * @throws ApiError + */ + public static searchFileApiV2ElasticsearchFileMsearchPost(): CancelablePromise { + return __request({ + method: 'POST', + path: `/api/v2/elasticsearch/file/_msearch`, + }); + } + + /** + * Search Dataset + * @returns any Successful Response + * @throws ApiError + */ + public static searchDatasetApiV2ElasticsearchDatasetMsearchPost(): CancelablePromise { + return __request({ + method: 'POST', + path: `/api/v2/elasticsearch/dataset/_msearch`, + }); + } + +} \ No newline at end of file diff --git a/frontend/src/openapi/v2/services/ExtractorsService.ts b/frontend/src/openapi/v2/services/ExtractorsService.ts new file mode 100644 index 000000000..199aa5b9b --- /dev/null +++ b/frontend/src/openapi/v2/services/ExtractorsService.ts @@ -0,0 +1,114 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { ExtractorBase } from '../models/ExtractorBase'; +import type { ExtractorIn } from '../models/ExtractorIn'; +import type { ExtractorOut } from '../models/ExtractorOut'; +import type { CancelablePromise } from '../core/CancelablePromise'; +import { request as __request } from '../core/request'; + +export class ExtractorsService { + + /** + * Get Extractors + * @param skip + * @param limit + * @returns ExtractorOut Successful Response + * @throws ApiError + */ + public static getExtractorsApiV2ExtractorsGet( + skip?: number, + limit: number = 2, + ): CancelablePromise> { + return __request({ + method: 'GET', + path: `/api/v2/extractors`, + query: { + 'skip': skip, + 'limit': limit, + }, + errors: { + 422: `Validation Error`, + }, + }); + } + + /** + * Save Extractor + * @param requestBody + * @returns ExtractorOut Successful Response + * @throws ApiError + */ + public static saveExtractorApiV2ExtractorsPost( + requestBody: ExtractorIn, + ): CancelablePromise { + return __request({ + method: 'POST', + path: `/api/v2/extractors`, + body: requestBody, + mediaType: 'application/json', + errors: { + 422: `Validation Error`, + }, + }); + } + + /** + * Get Extractor + * @param extractorId + * @returns ExtractorOut Successful Response + * @throws ApiError + */ + public static getExtractorApiV2ExtractorsExtractorIdGet( + extractorId: string, + ): CancelablePromise { + return __request({ + method: 'GET', + path: `/api/v2/extractors/${extractorId}`, + errors: { + 422: `Validation Error`, + }, + }); + } + + /** + * Edit Extractor + * @param extractorId + * @param requestBody + * @returns ExtractorOut Successful Response + * @throws ApiError + */ + public static editExtractorApiV2ExtractorsExtractorIdPut( + extractorId: string, + requestBody: ExtractorBase, + ): CancelablePromise { + return __request({ + method: 'PUT', + path: `/api/v2/extractors/${extractorId}`, 
+ body: requestBody, + mediaType: 'application/json', + errors: { + 422: `Validation Error`, + }, + }); + } + + /** + * Delete Extractor + * @param extractorId + * @returns any Successful Response + * @throws ApiError + */ + public static deleteExtractorApiV2ExtractorsExtractorIdDelete( + extractorId: string, + ): CancelablePromise { + return __request({ + method: 'DELETE', + path: `/api/v2/extractors/${extractorId}`, + errors: { + 422: `Validation Error`, + }, + }); + } + +} \ No newline at end of file From cec3a3c708befcdee06ae22f8acfe65eabbcdf40 Mon Sep 17 00:00:00 2001 From: toddn Date: Wed, 26 Oct 2022 11:19:10 -0500 Subject: [PATCH 23/34] adding to Explore --- frontend/src/actions/extractors.js | 24 ++++++++++++++++++++++++ frontend/src/components/Explore.tsx | 14 ++++++++++++++ frontend/src/reducers/extractors.ts | 18 ++++++++++++++++++ frontend/src/reducers/index.ts | 4 +++- frontend/src/types/data.ts | 15 ++++++++++++++- 5 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 frontend/src/actions/extractors.js create mode 100644 frontend/src/reducers/extractors.ts diff --git a/frontend/src/actions/extractors.js b/frontend/src/actions/extractors.js new file mode 100644 index 000000000..05fbb7ba4 --- /dev/null +++ b/frontend/src/actions/extractors.js @@ -0,0 +1,24 @@ +import {V2} from "../openapi"; +import {handleErrors} from "./common"; +import config from "../app.config"; +import {getHeader} from "../utils/common"; + +export const RECEIVE_EXTRACTORS = "RECEIVE_EXTRACTORS"; + +export function fetchExtractors(){ + return (dispatch) => { + // TODO: Parameters for dates? paging? + return V2.ExtractorsService.getExtractorsApiV2ExtractorsGet() + .then(json => { + dispatch({ + type: RECEIVE_EXTRACTORS, + extractors: json, + receivedAt: Date.now(), + }); + }) + .catch(reason => { + dispatch(handleErrors(reason, fetchExtractors())); + }); + + }; +} diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index df09ebde0..442b7b213 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -4,6 +4,7 @@ import {Box, Button, ButtonGroup, Grid, Tab, Tabs} from "@mui/material"; import {Dataset, RootState} from "../types/data"; import {useDispatch, useSelector} from "react-redux"; import {datasetDeleted, fetchDatasets,} from "../actions/dataset"; +import {fetchExtractors} from "../actions/extractors"; import {resetFailedReason} from "../actions/common"; import {downloadThumbnail} from "../utils/thumbnail"; @@ -31,6 +32,8 @@ export const Explore = (): JSX.Element => { const listDatasets = (skip: number | undefined, limit: number | undefined, mine: boolean | undefined) => dispatch(fetchDatasets(skip, limit, mine)); const dismissError = () => dispatch(resetFailedReason()); const datasets = useSelector((state: RootState) => state.dataset.datasets); + const listExtractors = () => dispatch(fetchExtractors()); + const extractors = useSelector((state: RootState) => state.extractors.extractors); const reason = useSelector((state: RootState) => state.error.reason); const stack = useSelector((state: RootState) => state.error.stack); @@ -49,6 +52,7 @@ export const Explore = (): JSX.Element => { // component did mount useEffect(() => { listDatasets(0, limit, mine); + listExtractors(); }, []); // Error msg dialog @@ -143,6 +147,7 @@ export const Explore = (): JSX.Element => { + @@ -178,6 +183,15 @@ export const Explore = (): JSX.Element => { + + {extractors.map((extractor) => { + return ( + + + + ); + }) + diff --git 
a/frontend/src/reducers/extractors.ts b/frontend/src/reducers/extractors.ts new file mode 100644 index 000000000..1c91b17eb --- /dev/null +++ b/frontend/src/reducers/extractors.ts @@ -0,0 +1,18 @@ +import {RECEIVE_EXTRACTORS} from "../actions/extractors"; +import {DataAction} from "../types/action"; +import {FolderState} from "../types/data"; + +const defaultState = { + extractors: [] +}; + +const extractors = (state = defaultState, action: DataAction) => { + switch (action.type) { + case RECEIVE_EXTRACTORS: + return Object.assign({}, state, {extractors: action.extractors}); + default: + return state; + } +}; + +export default extractors; diff --git a/frontend/src/reducers/index.ts b/frontend/src/reducers/index.ts index 8936876ec..c61346792 100644 --- a/frontend/src/reducers/index.ts +++ b/frontend/src/reducers/index.ts @@ -5,6 +5,7 @@ import folder from "./folder"; import user from "./user"; import error from "./error"; import metadata from "./metadata"; +import extractors from "./extractors"; const rootReducer = combineReducers({ file: file, @@ -12,7 +13,8 @@ const rootReducer = combineReducers({ folder: folder, user: user, error: error, - metadata: metadata + metadata: metadata, + extractors: extractors }); export default rootReducer; diff --git a/frontend/src/types/data.ts b/frontend/src/types/data.ts index 552cb2443..94e1aafad 100644 --- a/frontend/src/types/data.ts +++ b/frontend/src/types/data.ts @@ -1,7 +1,7 @@ import {MetadataDefinitionOut, MetadataOut as Metadata, FileOut as FileSummary, - FileVersion, FolderOut} from "../openapi/v2"; + FileVersion, FolderOut } from "../openapi/v2"; export interface Dataset { name: string; @@ -17,6 +17,15 @@ export interface Dataset { thumbnail: string; } +export interface Extractor { + name: string; + description: string; + id: string; + parameters: any; +} + + + export interface Author { id: string; email: string; @@ -120,6 +129,9 @@ export interface DatasetState{ newFile: FileSummary; about: Dataset; } +export interface ExtractorState{ + extractors: Extractor[]; +} export interface MetadataState{ metadataDefinitionList: MetadataDefinitionOut[], datasetMetadataList: Metadata[], @@ -156,6 +168,7 @@ export interface RootState { error: ErrorState; file:FileState; dataset:DatasetState; + extractor:ExtractorState; user: UserState; folder: FolderState; } From c4e8ad0ebd63e500b3b0fd147127ba38a567030c Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 27 Oct 2022 09:53:36 -0500 Subject: [PATCH 24/34] reverting changes --- frontend/src/actions/extractors.js | 24 -------- frontend/src/components/Explore.tsx | 14 ----- frontend/src/openapi/v2/index.ts | 4 -- .../src/openapi/v2/models/ExtractorBase.ts | 25 -------- .../openapi/v2/models/ExtractorIdentifier.ts | 17 +----- frontend/src/openapi/v2/models/ExtractorIn.ts | 17 +----- .../src/openapi/v2/models/ExtractorOut.ts | 17 +----- .../openapi/v2/models/MetadataDefinitionIn.ts | 59 ++++++++---------- .../v2/models/MetadataDefinitionOut.ts | 61 ++++++++----------- frontend/src/openapi/v2/models/Repository.ts | 9 --- .../openapi/v2/services/DatasetsService.ts | 24 +------- frontend/src/reducers/index.ts | 4 +- frontend/src/types/data.ts | 15 +---- 13 files changed, 57 insertions(+), 233 deletions(-) delete mode 100644 frontend/src/actions/extractors.js delete mode 100644 frontend/src/openapi/v2/models/ExtractorBase.ts delete mode 100644 frontend/src/openapi/v2/models/Repository.ts diff --git a/frontend/src/actions/extractors.js b/frontend/src/actions/extractors.js deleted file mode 100644 index 
05fbb7ba4..000000000 --- a/frontend/src/actions/extractors.js +++ /dev/null @@ -1,24 +0,0 @@ -import {V2} from "../openapi"; -import {handleErrors} from "./common"; -import config from "../app.config"; -import {getHeader} from "../utils/common"; - -export const RECEIVE_EXTRACTORS = "RECEIVE_EXTRACTORS"; - -export function fetchExtractors(){ - return (dispatch) => { - // TODO: Parameters for dates? paging? - return V2.ExtractorsService.getExtractorsApiV2ExtractorsGet() - .then(json => { - dispatch({ - type: RECEIVE_EXTRACTORS, - extractors: json, - receivedAt: Date.now(), - }); - }) - .catch(reason => { - dispatch(handleErrors(reason, fetchExtractors())); - }); - - }; -} diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index 442b7b213..df09ebde0 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -4,7 +4,6 @@ import {Box, Button, ButtonGroup, Grid, Tab, Tabs} from "@mui/material"; import {Dataset, RootState} from "../types/data"; import {useDispatch, useSelector} from "react-redux"; import {datasetDeleted, fetchDatasets,} from "../actions/dataset"; -import {fetchExtractors} from "../actions/extractors"; import {resetFailedReason} from "../actions/common"; import {downloadThumbnail} from "../utils/thumbnail"; @@ -32,8 +31,6 @@ export const Explore = (): JSX.Element => { const listDatasets = (skip: number | undefined, limit: number | undefined, mine: boolean | undefined) => dispatch(fetchDatasets(skip, limit, mine)); const dismissError = () => dispatch(resetFailedReason()); const datasets = useSelector((state: RootState) => state.dataset.datasets); - const listExtractors = () => dispatch(fetchExtractors()); - const extractors = useSelector((state: RootState) => state.extractors.extractors); const reason = useSelector((state: RootState) => state.error.reason); const stack = useSelector((state: RootState) => state.error.stack); @@ -52,7 +49,6 @@ export const Explore = (): JSX.Element => { // component did mount useEffect(() => { listDatasets(0, limit, mine); - listExtractors(); }, []); // Error msg dialog @@ -147,7 +143,6 @@ export const Explore = (): JSX.Element => { - @@ -183,15 +178,6 @@ export const Explore = (): JSX.Element => { - - {extractors.map((extractor) => { - return ( - - - - ); - }) - diff --git a/frontend/src/openapi/v2/index.ts b/frontend/src/openapi/v2/index.ts index 010c00aba..13cc6a85c 100644 --- a/frontend/src/openapi/v2/index.ts +++ b/frontend/src/openapi/v2/index.ts @@ -15,7 +15,6 @@ export type { DatasetBase } from './models/DatasetBase'; export type { DatasetIn } from './models/DatasetIn'; export type { DatasetOut } from './models/DatasetOut'; export type { DatasetPatch } from './models/DatasetPatch'; -export type { ExtractorBase } from './models/ExtractorBase'; export type { ExtractorIdentifier } from './models/ExtractorIdentifier'; export type { ExtractorIn } from './models/ExtractorIn'; export type { ExtractorOut } from './models/ExtractorOut'; @@ -35,7 +34,6 @@ export type { MetadataIn } from './models/MetadataIn'; export type { MetadataOut } from './models/MetadataOut'; export type { MetadataPatch } from './models/MetadataPatch'; export type { MongoDBRef } from './models/MongoDBRef'; -export type { Repository } from './models/Repository'; export type { UserIn } from './models/UserIn'; export type { UserOut } from './models/UserOut'; export type { ValidationError } from './models/ValidationError'; @@ -43,8 +41,6 @@ export type { ValidationError } from './models/ValidationError'; export { 
AuthService } from './services/AuthService'; export { CollectionsService } from './services/CollectionsService'; export { DatasetsService } from './services/DatasetsService'; -export { ElasticsearchService } from './services/ElasticsearchService'; -export { ExtractorsService } from './services/ExtractorsService'; export { FilesService } from './services/FilesService'; export { FoldersService } from './services/FoldersService'; export { LoginService } from './services/LoginService'; diff --git a/frontend/src/openapi/v2/models/ExtractorBase.ts b/frontend/src/openapi/v2/models/ExtractorBase.ts deleted file mode 100644 index 3ad175003..000000000 --- a/frontend/src/openapi/v2/models/ExtractorBase.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ - -import type { Repository } from './Repository'; - -export type ExtractorBase = { - id?: string; - name: string; - version?: string; - updated?: string; - author: string; - contributors?: Array; - contexts?: Array; - repository?: Array; - external_services: Array; - libraries?: Array; - bibtex: Array; - maturity?: string; - default_labels?: Array; - process: any; - categories?: Array; - parameters?: any; - description?: string; -} diff --git a/frontend/src/openapi/v2/models/ExtractorIdentifier.ts b/frontend/src/openapi/v2/models/ExtractorIdentifier.ts index 97350a861..04a191142 100644 --- a/frontend/src/openapi/v2/models/ExtractorIdentifier.ts +++ b/frontend/src/openapi/v2/models/ExtractorIdentifier.ts @@ -2,23 +2,8 @@ /* tslint:disable */ /* eslint-disable */ -import type { Repository } from './Repository'; - export type ExtractorIdentifier = { id?: string; name: string; - version?: string; - updated?: string; - author: string; - contributors?: Array; - contexts?: Array; - repository?: Array; - external_services: Array; - libraries?: Array; - bibtex: Array; - maturity?: string; - default_labels?: Array; - process: any; - categories?: Array; - parameters?: any; + version?: number; } diff --git a/frontend/src/openapi/v2/models/ExtractorIn.ts b/frontend/src/openapi/v2/models/ExtractorIn.ts index 658eace34..4b050828d 100644 --- a/frontend/src/openapi/v2/models/ExtractorIn.ts +++ b/frontend/src/openapi/v2/models/ExtractorIn.ts @@ -2,24 +2,9 @@ /* tslint:disable */ /* eslint-disable */ -import type { Repository } from './Repository'; - export type ExtractorIn = { id?: string; name: string; - version?: string; - updated?: string; - author: string; - contributors?: Array; - contexts?: Array; - repository?: Array; - external_services: Array; - libraries?: Array; - bibtex: Array; - maturity?: string; - default_labels?: Array; - process: any; - categories?: Array; - parameters?: any; + version?: number; description?: string; } diff --git a/frontend/src/openapi/v2/models/ExtractorOut.ts b/frontend/src/openapi/v2/models/ExtractorOut.ts index 99545fb0e..5fa12380e 100644 --- a/frontend/src/openapi/v2/models/ExtractorOut.ts +++ b/frontend/src/openapi/v2/models/ExtractorOut.ts @@ -2,24 +2,9 @@ /* tslint:disable */ /* eslint-disable */ -import type { Repository } from './Repository'; - export type ExtractorOut = { id?: string; name: string; - version?: string; - updated?: string; - author: string; - contributors?: Array; - contexts?: Array; - repository?: Array; - external_services: Array; - libraries?: Array; - bibtex: Array; - maturity?: string; - default_labels?: Array; - process: any; - categories?: Array; - parameters?: any; + version?: number; description?: string; } diff --git 
a/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts b/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts index fbf785cf5..b2348f0e6 100644 --- a/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts +++ b/frontend/src/openapi/v2/models/MetadataDefinitionIn.ts @@ -9,39 +9,28 @@ import type { MetadataField } from './MetadataField'; * These provide a shorthand for use by extractors as well as a source for building GUI widgets to add new entries. * * Example: { - * "name" : "LatLon", - * "description" : "A set of Latitude/Longitude coordinates", - * "context" : { - * "longitude" : "https://schema.org/longitude", - * "latitude" : "https://schema.org/latitude" + * "name": "LatLon", + * "description": "A set of Latitude/Longitude coordinates", + * "context": { + * "longitude": "https://schema.org/longitude", + * "latitude": "https://schema.org/latitude" * }, - * "fields" : [ - * { - * "name" : "longitude", - * "list" : false, - * "widgetType": "TextField", - * "config": { - * "type" : "float" - * }, - * "required" : true - * }, - * { - * "name" : "latitude", - * "list" : false, - * "widgetType": "TextField", - * "config": { - * "type" : "float" - * }, - * "required" : true - * } - * ] - * } - */ - export type MetadataDefinitionIn = { - id?: string; - name: string; - description?: string; - context?: any; - context_url?: string; - fields: Array; - } + * "fields": [{ + * "name": "longitude", + * "type": "float", + * "required": "True" + * },{ + * "name": "latitude", + * "type": "float", + * "required": "True" + * }] + * } + */ + export type MetadataDefinitionIn = { + id?: string; + name: string; + description?: string; + context?: any; + context_url?: string; + fields: Array; + } diff --git a/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts b/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts index f6801840e..d41409ade 100644 --- a/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts +++ b/frontend/src/openapi/v2/models/MetadataDefinitionOut.ts @@ -10,40 +10,29 @@ import type { UserOut } from './UserOut'; * These provide a shorthand for use by extractors as well as a source for building GUI widgets to add new entries. 
* * Example: { - * "name" : "LatLon", - * "description" : "A set of Latitude/Longitude coordinates", - * "context" : { - * "longitude" : "https://schema.org/longitude", - * "latitude" : "https://schema.org/latitude" + * "name": "LatLon", + * "description": "A set of Latitude/Longitude coordinates", + * "context": { + * "longitude": "https://schema.org/longitude", + * "latitude": "https://schema.org/latitude" * }, - * "fields" : [ - * { - * "name" : "longitude", - * "list" : false, - * "widgetType": "TextField", - * "config": { - * "type" : "float" - * }, - * "required" : true - * }, - * { - * "name" : "latitude", - * "list" : false, - * "widgetType": "TextField", - * "config": { - * "type" : "float" - * }, - * "required" : true - * } - * ] - * } - */ - export type MetadataDefinitionOut = { - id?: string; - name: string; - description?: string; - context?: any; - context_url?: string; - fields: Array; - creator: UserOut; - } + * "fields": [{ + * "name": "longitude", + * "type": "float", + * "required": "True" + * },{ + * "name": "latitude", + * "type": "float", + * "required": "True" + * }] + * } + */ + export type MetadataDefinitionOut = { + id?: string; + name: string; + description?: string; + context?: any; + context_url?: string; + fields: Array; + creator: UserOut; + } diff --git a/frontend/src/openapi/v2/models/Repository.ts b/frontend/src/openapi/v2/models/Repository.ts deleted file mode 100644 index faa80042c..000000000 --- a/frontend/src/openapi/v2/models/Repository.ts +++ /dev/null @@ -1,9 +0,0 @@ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ - -export type Repository = { - id?: string; - repository_type?: string; - repository_url?: string; -} diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 8c26b66dd..f16f5a35b 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -174,15 +174,15 @@ export class DatasetsService { /** * Save File * @param datasetId - * @param formData * @param folderId + * @param formData * @returns FileOut Successful Response * @throws ApiError */ public static saveFileApiV2DatasetsDatasetIdFilesPost( datasetId: string, + folderId: string, formData: Body_save_file_api_v2_datasets__dataset_id__files_post, - folderId?: string, ): CancelablePromise { return __request({ method: 'POST', @@ -301,22 +301,4 @@ export class DatasetsService { }); } - /** - * Get Dataset Extract - * @param datasetId - * @returns any Successful Response - * @throws ApiError - */ - public static getDatasetExtractApiV2DatasetsDatasetIdExtractPost( - datasetId: string, - ): CancelablePromise { - return __request({ - method: 'POST', - path: `/api/v2/datasets/${datasetId}/extract`, - errors: { - 422: `Validation Error`, - }, - }); - } - -} \ No newline at end of file +} diff --git a/frontend/src/reducers/index.ts b/frontend/src/reducers/index.ts index c61346792..8936876ec 100644 --- a/frontend/src/reducers/index.ts +++ b/frontend/src/reducers/index.ts @@ -5,7 +5,6 @@ import folder from "./folder"; import user from "./user"; import error from "./error"; import metadata from "./metadata"; -import extractors from "./extractors"; const rootReducer = combineReducers({ file: file, @@ -13,8 +12,7 @@ const rootReducer = combineReducers({ folder: folder, user: user, error: error, - metadata: metadata, - extractors: extractors + metadata: metadata }); export default rootReducer; diff --git a/frontend/src/types/data.ts 
b/frontend/src/types/data.ts index 94e1aafad..552cb2443 100644 --- a/frontend/src/types/data.ts +++ b/frontend/src/types/data.ts @@ -1,7 +1,7 @@ import {MetadataDefinitionOut, MetadataOut as Metadata, FileOut as FileSummary, - FileVersion, FolderOut } from "../openapi/v2"; + FileVersion, FolderOut} from "../openapi/v2"; export interface Dataset { name: string; @@ -17,15 +17,6 @@ export interface Dataset { thumbnail: string; } -export interface Extractor { - name: string; - description: string; - id: string; - parameters: any; -} - - - export interface Author { id: string; email: string; @@ -129,9 +120,6 @@ export interface DatasetState{ newFile: FileSummary; about: Dataset; } -export interface ExtractorState{ - extractors: Extractor[]; -} export interface MetadataState{ metadataDefinitionList: MetadataDefinitionOut[], datasetMetadataList: Metadata[], @@ -168,7 +156,6 @@ export interface RootState { error: ErrorState; file:FileState; dataset:DatasetState; - extractor:ExtractorState; user: UserState; folder: FolderState; } From 2bbfdd5314e11d39df3bbc2a64141fb68fa78a0c Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 27 Oct 2022 09:55:07 -0500 Subject: [PATCH 25/34] reverting --- .../v2/services/ElasticsearchService.ts | 57 --------- .../openapi/v2/services/ExtractorsService.ts | 114 ------------------ frontend/src/reducers/extractors.ts | 18 --- 3 files changed, 189 deletions(-) delete mode 100644 frontend/src/openapi/v2/services/ElasticsearchService.ts delete mode 100644 frontend/src/openapi/v2/services/ExtractorsService.ts delete mode 100644 frontend/src/reducers/extractors.ts diff --git a/frontend/src/openapi/v2/services/ElasticsearchService.ts b/frontend/src/openapi/v2/services/ElasticsearchService.ts deleted file mode 100644 index 5c09cb9bc..000000000 --- a/frontend/src/openapi/v2/services/ElasticsearchService.ts +++ /dev/null @@ -1,57 +0,0 @@ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { CancelablePromise } from '../core/CancelablePromise'; -import { request as __request } from '../core/request'; - -export class ElasticsearchService { - - /** - * Search - * @param indexName - * @param query - * @returns string Successful Response - * @throws ApiError - */ - public static searchApiV2ElasticsearchSearchPut( - indexName: string, - query: string, - ): CancelablePromise { - return __request({ - method: 'PUT', - path: `/api/v2/elasticsearch/search`, - query: { - 'index_name': indexName, - 'query': query, - }, - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Search File - * @returns any Successful Response - * @throws ApiError - */ - public static searchFileApiV2ElasticsearchFileMsearchPost(): CancelablePromise { - return __request({ - method: 'POST', - path: `/api/v2/elasticsearch/file/_msearch`, - }); - } - - /** - * Search Dataset - * @returns any Successful Response - * @throws ApiError - */ - public static searchDatasetApiV2ElasticsearchDatasetMsearchPost(): CancelablePromise { - return __request({ - method: 'POST', - path: `/api/v2/elasticsearch/dataset/_msearch`, - }); - } - -} \ No newline at end of file diff --git a/frontend/src/openapi/v2/services/ExtractorsService.ts b/frontend/src/openapi/v2/services/ExtractorsService.ts deleted file mode 100644 index 199aa5b9b..000000000 --- a/frontend/src/openapi/v2/services/ExtractorsService.ts +++ /dev/null @@ -1,114 +0,0 @@ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { ExtractorBase } from '../models/ExtractorBase'; -import type { ExtractorIn } 
from '../models/ExtractorIn'; -import type { ExtractorOut } from '../models/ExtractorOut'; -import type { CancelablePromise } from '../core/CancelablePromise'; -import { request as __request } from '../core/request'; - -export class ExtractorsService { - - /** - * Get Extractors - * @param skip - * @param limit - * @returns ExtractorOut Successful Response - * @throws ApiError - */ - public static getExtractorsApiV2ExtractorsGet( - skip?: number, - limit: number = 2, - ): CancelablePromise> { - return __request({ - method: 'GET', - path: `/api/v2/extractors`, - query: { - 'skip': skip, - 'limit': limit, - }, - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Save Extractor - * @param requestBody - * @returns ExtractorOut Successful Response - * @throws ApiError - */ - public static saveExtractorApiV2ExtractorsPost( - requestBody: ExtractorIn, - ): CancelablePromise { - return __request({ - method: 'POST', - path: `/api/v2/extractors`, - body: requestBody, - mediaType: 'application/json', - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Get Extractor - * @param extractorId - * @returns ExtractorOut Successful Response - * @throws ApiError - */ - public static getExtractorApiV2ExtractorsExtractorIdGet( - extractorId: string, - ): CancelablePromise { - return __request({ - method: 'GET', - path: `/api/v2/extractors/${extractorId}`, - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Edit Extractor - * @param extractorId - * @param requestBody - * @returns ExtractorOut Successful Response - * @throws ApiError - */ - public static editExtractorApiV2ExtractorsExtractorIdPut( - extractorId: string, - requestBody: ExtractorBase, - ): CancelablePromise { - return __request({ - method: 'PUT', - path: `/api/v2/extractors/${extractorId}`, - body: requestBody, - mediaType: 'application/json', - errors: { - 422: `Validation Error`, - }, - }); - } - - /** - * Delete Extractor - * @param extractorId - * @returns any Successful Response - * @throws ApiError - */ - public static deleteExtractorApiV2ExtractorsExtractorIdDelete( - extractorId: string, - ): CancelablePromise { - return __request({ - method: 'DELETE', - path: `/api/v2/extractors/${extractorId}`, - errors: { - 422: `Validation Error`, - }, - }); - } - -} \ No newline at end of file diff --git a/frontend/src/reducers/extractors.ts b/frontend/src/reducers/extractors.ts deleted file mode 100644 index 1c91b17eb..000000000 --- a/frontend/src/reducers/extractors.ts +++ /dev/null @@ -1,18 +0,0 @@ -import {RECEIVE_EXTRACTORS} from "../actions/extractors"; -import {DataAction} from "../types/action"; -import {FolderState} from "../types/data"; - -const defaultState = { - extractors: [] -}; - -const extractors = (state = defaultState, action: DataAction) => { - switch (action.type) { - case RECEIVE_EXTRACTORS: - return Object.assign({}, state, {extractors: action.extractors}); - default: - return state; - } -}; - -export default extractors; From bdc8dacbf6f4b1f182c197ff21106f2d0949ae2f Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 27 Oct 2022 14:14:40 -0500 Subject: [PATCH 26/34] merging and fixing to work with existing code --- backend/app/rabbitmq/listeners.py | 34 ++++++++++++++++++++ backend/app/routers/files.py | 53 +++++++------------------------ 2 files changed, 46 insertions(+), 41 deletions(-) diff --git a/backend/app/rabbitmq/listeners.py b/backend/app/rabbitmq/listeners.py index 0ba7db533..467d1326c 100644 --- a/backend/app/rabbitmq/listeners.py +++ b/backend/app/rabbitmq/listeners.py @@ -8,6 +8,7 
@@
 from app.keycloak_auth import get_token
 from app import dependencies
 from app.models.files import FileOut
+from app.models.datasets import DatasetOut
 from app.models.listeners import EventListenerMessage
@@ -43,3 +44,36 @@ def submit_file_message(
         ),
     )
     return {"message": "testing", "file_id": file_out.id}
+
+def submit_dataset_message(
+    dataset_out: DatasetOut,
+    queue: str,
+    routing_key: str,
+    parameters: dict,
+    token: str = Depends(get_token),
+    db: MongoClient = Depends(dependencies.get_db),
+    rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq),
+):
+    # TODO check if extractor is registered
+    msg_body = EventListenerMessage(
+        filename=dataset_out.name,
+        fileSize=dataset_out.bytes,
+        id=dataset_out.id,
+        datasetId=dataset_out.dataset_id,
+        secretKey=token,
+    )
+
+    rabbitmq_client.queue_bind(
+        exchange="extractors",
+        queue=queue,
+        routing_key=routing_key,
+    )
+    rabbitmq_client.basic_publish(
+        exchange="extractors",
+        routing_key=routing_key,
+        body=json.dumps(msg_body.dict(), ensure_ascii=False),
+        properties=pika.BasicProperties(
+            content_type="application/json", delivery_mode=1
+        ),
+    )
+    return {"message": "testing", "dataset_id": dataset_out.id}
diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py
index e4299c4a0..7d5d22370 100644
--- a/backend/app/routers/files.py
+++ b/backend/app/routers/files.py
@@ -314,46 +314,17 @@ async def get_file_extract(
         req_headers = info.headers
         raw = req_headers.raw
         authorization = raw[1]
-        token = authorization[1].decode("utf-8")
-        token = token.lstrip("Bearer")
-        token = token.lstrip(" ")
-        req_info = await info.json()
-        if "extractor" in req_info:
-            # TODO check if extractor is registered
-            msg = {"message": "testing", "file_id": file_id}
-            body = {}
-            # TODO better solution for host
-            body["host"] = "http://127.0.0.1:8000"
-            body["secretKey"] = token
-            body["token"] = token
-            body["retry_count"] = 0
-            body["filename"] = file["name"]
-            body["id"] = file_id
-            body["datasetId"] = str(file["dataset_id"])
-            body["secretKey"] = token
-            body["fileSize"] = file["bytes"]
-            body["resource_type"] = "file"
-            body["flags"] = ""
-            current_queue = req_info["extractor"]
-            if "parameters" in req_info:
-                current_parameters = req_info["parameters"]
-                body["parameters"] = current_parameters
-            current_routing_key = "extractors." + current_queue
-            rabbitmq_client.queue_bind(
-                exchange="extractors",
-                queue=current_queue,
-                routing_key=current_routing_key,
-            )
-            rabbitmq_client.basic_publish(
-                exchange="extractors",
-                routing_key=current_routing_key,
-                body=json.dumps(body, ensure_ascii=False),
-                properties=pika.BasicProperties(
-                    content_type="application/json", delivery_mode=1
-                ),
-            )
-            return msg
-        else:
-            raise HTTPException(status_code=404, detail=f"No extractor submitted")
+        token = authorization[1].decode("utf-8").lstrip("Bearer").lstrip(" ")
+
+        queue = req_info["extractor"]
+        if "parameters" in req_info:
+            parameters = req_info["parameters"]
+        routing_key = "extractors." + queue
+
+        submit_file_message(
+            file_out, queue, routing_key, parameters, token, db, rabbitmq_client
+        )
+
+        return {"message": "testing", "file_id": file_id}
     else:
         raise HTTPException(status_code=404, detail=f"File {file_id} not found")

From 985cb5e70b62a3f5fb8dd2a4933a8b8d4f7f6b10 Mon Sep 17 00:00:00 2001
From: toddn
Date: Thu, 27 Oct 2022 14:19:12 -0500
Subject: [PATCH 27/34] pipenv run black app formatting

---
 backend/app/rabbitmq/listeners.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/backend/app/rabbitmq/listeners.py b/backend/app/rabbitmq/listeners.py
index 467d1326c..ba05d5665 100644
--- a/backend/app/rabbitmq/listeners.py
+++ b/backend/app/rabbitmq/listeners.py
@@ -45,6 +45,7 @@ def submit_file_message(
     )
     return {"message": "testing", "file_id": file_out.id}
 
+
 def submit_dataset_message(
     dataset_out: DatasetOut,
     queue: str,

From f447ba55cc135278c52d7222226a7869d13a291e Mon Sep 17 00:00:00 2001
From: toddn
Date: Thu, 27 Oct 2022 14:28:11 -0500
Subject: [PATCH 28/34] use listeners not extractors, routers have changed names

---
 backend/app/tests/test_extractors.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py
index 00428403a..b469f7f39 100644
--- a/backend/app/tests/test_extractors.py
+++ b/backend/app/tests/test_extractors.py
@@ -41,7 +41,7 @@ def test_register(client: TestClient, headers: dict):
     response = client.post(
-        f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers
+        f"{settings.API_V2_STR}/listeners", json=extractor_info, headers=headers
     )
     assert response.json().get("id") is not None
     assert response.status_code == 200
@@ -49,13 +49,13 @@ def test_get_one(client: TestClient, headers: dict):
     response = client.post(
-        f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers
+        f"{settings.API_V2_STR}/listeners", json=extractor_info, headers=headers
     )
     assert response.status_code == 200
     assert response.json().get("id") is not None
     extractor_id = response.json().get("id")
     response = client.get(
-        f"{settings.API_V2_STR}/extractors/{extractor_id}", headers=headers
+        f"{settings.API_V2_STR}/listeners/{extractor_id}", headers=headers
     )
     assert response.status_code == 200
     assert response.json().get("id") is not None
@@ -63,12 +63,12 @@ def test_delete(client: TestClient, headers: dict):
     response = client.post(
-        f"{settings.API_V2_STR}/extractors", json=extractor_info, headers=headers
+        f"{settings.API_V2_STR}/listeners", json=extractor_info, headers=headers
     )
     assert response.status_code == 200
     assert response.json().get("id") is not None
     extractor_id = response.json().get("id")
     response = client.delete(
-        f"{settings.API_V2_STR}/extractors/{extractor_id}", headers=headers
+        f"{settings.API_V2_STR}/listeners/{extractor_id}", headers=headers
     )
     assert response.status_code == 200

From 8258af8c0eb97b9949569d18ae332e259ea9e1e1 Mon Sep 17 00:00:00 2001
From: toddn
Date: Thu, 27 Oct 2022 14:50:29 -0500
Subject: [PATCH 29/34] str for author for extractor

---
 backend/app/models/listeners.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py
index 9cfc24a16..018b56de0 100644
--- a/backend/app/models/listeners.py
+++ b/backend/app/models/listeners.py
@@ -34,7 +34,7 @@ class EventListenerBase(BaseModel):
     """An Event Listener is the expanded version of v1
Extractors.""" name: str - version: int = 1 + version: str = "1.0" description: str = "" @@ -55,7 +55,7 @@ class LegacyEventListenerIn(ExtractorInfo): class EventListenerDB(EventListenerBase, MongoModel): """EventListeners have a name, version, author, description, and optionally properties where extractor_info will be saved.""" - author: UserOut + author: str created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) properties: Optional[ExtractorInfo] = None From a1e16cc7c6f5023d89936eb67a9fdeb24720b1a6 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 27 Oct 2022 15:21:22 -0500 Subject: [PATCH 30/34] removing author for extractor info does this fix registration? --- backend/app/models/listeners.py | 2 +- backend/app/tests/test_extractors.py | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 018b56de0..802c89886 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -55,7 +55,7 @@ class LegacyEventListenerIn(ExtractorInfo): class EventListenerDB(EventListenerBase, MongoModel): """EventListeners have a name, version, author, description, and optionally properties where extractor_info will be saved.""" - author: str + author: UserOut created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) properties: Optional[ExtractorInfo] = None diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py index b469f7f39..0d1ebf71c 100644 --- a/backend/app/tests/test_extractors.py +++ b/backend/app/tests/test_extractors.py @@ -14,9 +14,7 @@ "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", "name": "ncsa.wordcount", "version": "2.0", - "description": "WordCount extractor. Counts the number of characters, words and lines in the text file that was uploaded.", - "author": "Rob Kooper ", - "contributors": [], + "description": "WordCount extractor. Counts the number of characters, words and lines in the text file that was uploaded.", "contributors": [], "contexts": [ { "lines": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#lines", From c8cf5d370ba33ab6cfc1caae7707d606d4eb5c94 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 27 Oct 2022 15:22:49 -0500 Subject: [PATCH 31/34] remove author from the extractor info --- backend/app/tests/test_extractors.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/app/tests/test_extractors.py b/backend/app/tests/test_extractors.py index 0d1ebf71c..74529f2fe 100644 --- a/backend/app/tests/test_extractors.py +++ b/backend/app/tests/test_extractors.py @@ -14,7 +14,8 @@ "@context": "http://clowder.ncsa.illinois.edu/contexts/extractors.jsonld", "name": "ncsa.wordcount", "version": "2.0", - "description": "WordCount extractor. Counts the number of characters, words and lines in the text file that was uploaded.", "contributors": [], + "description": "WordCount extractor. Counts the number of characters, words and lines in the text file that was uploaded.", + "contributors": [], "contexts": [ { "lines": "http://clowder.ncsa.illinois.edu/metadata/ncsa.wordcount#lines", From 95d36089151e40a7e8d76be56e92122474afe41e Mon Sep 17 00:00:00 2001 From: toddn Date: Fri, 28 Oct 2022 11:33:10 -0500 Subject: [PATCH 32/34] using Creator instead of Author to avoid conflict with old extractor fields. 
Making Optional in case not supplied (registration through rabbitmq heartbeat monitor instead of directly by user) --- backend/app/models/listeners.py | 2 +- backend/app/routers/listeners.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/app/models/listeners.py b/backend/app/models/listeners.py index 802c89886..e9453b8c6 100644 --- a/backend/app/models/listeners.py +++ b/backend/app/models/listeners.py @@ -55,7 +55,7 @@ class LegacyEventListenerIn(ExtractorInfo): class EventListenerDB(EventListenerBase, MongoModel): """EventListeners have a name, version, author, description, and optionally properties where extractor_info will be saved.""" - author: UserOut + creator: Optional[UserOut] = None created: datetime = Field(default_factory=datetime.utcnow) modified: datetime = Field(default_factory=datetime.utcnow) properties: Optional[ExtractorInfo] = None diff --git a/backend/app/routers/listeners.py b/backend/app/routers/listeners.py index f1655c111..83bfd3c4a 100644 --- a/backend/app/routers/listeners.py +++ b/backend/app/routers/listeners.py @@ -29,7 +29,7 @@ async def save_listener( db: MongoClient = Depends(get_db), ): """Register a new Event Listener with the system.""" - listener = EventListenerDB(**listener_in.dict(), author=user) + listener = EventListenerDB(**listener_in.dict(), creator=user) # TODO: Check for duplicates somehow? new_listener = await db["listeners"].insert_one(listener.to_mongo()) found = await db["listeners"].find_one({"_id": new_listener.inserted_id}) @@ -49,7 +49,7 @@ async def save_legacy_listener( name=legacy_in.name, version=int(legacy_in.version), description=legacy_in.description, - author=user, + creator=user, properties=listener_properties, ) new_listener = await db["listeners"].insert_one(listener.to_mongo()) From e12b64ac0c745c4be984809b95c70062bd68f1a1 Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 31 Oct 2022 10:03:28 -0500 Subject: [PATCH 33/34] Change extractors to listeners consistently --- .../app/rabbitmq/heartbeat_listener_async.py | 27 +++++++------------ .../app/rabbitmq/heartbeat_listener_sync.py | 12 ++++----- backend/app/routers/metadata_files.py | 8 +++--- 3 files changed, 20 insertions(+), 27 deletions(-) diff --git a/backend/app/rabbitmq/heartbeat_listener_async.py b/backend/app/rabbitmq/heartbeat_listener_async.py index 45f628270..df11c6d46 100644 --- a/backend/app/rabbitmq/heartbeat_listener_async.py +++ b/backend/app/rabbitmq/heartbeat_listener_async.py @@ -5,12 +5,7 @@ from app.config import settings from aio_pika.abc import AbstractIncomingMessage from pymongo import MongoClient -from app.models.extractors import ( - ExtractorBase, - ExtractorIn, - ExtractorDB, - ExtractorOut, -) +from app.models.listeners import LegacyEventListenerIn, EventListenerOut async def on_message(message: AbstractIncomingMessage) -> None: @@ -21,20 +16,18 @@ async def on_message(message: AbstractIncomingMessage) -> None: extractor_queue = statusBody["queue"] extractor_info = statusBody["extractor_info"] extractor_name = extractor_info["name"] - extractor_db = ExtractorDB(**extractor_info) + extractor_db = LegacyEventListenerIn(**extractor_info) client = MongoClient(settings.MONGODB_URL) db = client["clowder2"] - existing_extractor = db["extractors"].find_one({"name": extractor_queue}) + existing_extractor = db["listeners"].find_one({"name": extractor_queue}) if existing_extractor is not None: existing_version = existing_extractor["version"] new_version = extractor_db.version if version.parse(new_version) > 
version.parse(existing_version): - new_extractor = db["extractors"].insert_one(extractor_db.to_mongo()) - found = db["extractors"].find_one({"_id": new_extractor.inserted_id}) - removed = db["extractors"].delete_one( - {"_id": existing_extractor["_id"]} - ) - extractor_out = ExtractorOut.from_mongo(found) + new_extractor = db["listeners"].insert_one(extractor_db.to_mongo()) + found = db["listeners"].find_one({"_id": new_extractor.inserted_id}) + removed = db["listeners"].delete_one({"_id": existing_extractor["_id"]}) + extractor_out = EventListenerOut.from_mongo(found) print( "extractor updated: " + extractor_name @@ -45,9 +38,9 @@ async def on_message(message: AbstractIncomingMessage) -> None: ) return extractor_out else: - new_extractor = db["extractors"].insert_one(extractor_db.to_mongo()) - found = db["extractors"].find_one({"_id": new_extractor.inserted_id}) - extractor_out = ExtractorOut.from_mongo(found) + new_extractor = db["listeners"].insert_one(extractor_db.to_mongo()) + found = db["listeners"].find_one({"_id": new_extractor.inserted_id}) + extractor_out = EventListenerOut.from_mongo(found) print("new extractor registered: " + extractor_name) return extractor_out diff --git a/backend/app/rabbitmq/heartbeat_listener_sync.py b/backend/app/rabbitmq/heartbeat_listener_sync.py index 76eadb19a..f8835238f 100644 --- a/backend/app/rabbitmq/heartbeat_listener_sync.py +++ b/backend/app/rabbitmq/heartbeat_listener_sync.py @@ -19,14 +19,14 @@ def callback(ch, method, properties, body): extractor_db = EventListenerDB(**extractor_info) client = MongoClient(settings.MONGODB_URL) db = client["clowder2"] - existing_extractor = db["extractors"].find_one({"name": extractor_queue}) + existing_extractor = db["listeners"].find_one({"name": extractor_queue}) if existing_extractor is not None: existing_version = existing_extractor["version"] new_version = extractor_db.version if version.parse(new_version) > version.parse(existing_version): - new_extractor = db["extractors"].insert_one(extractor_db.to_mongo()) - found = db["extractors"].find_one({"_id": new_extractor.inserted_id}) - removed = db["extractors"].delete_one({"_id": existing_extractor["_id"]}) + new_extractor = db["listeners"].insert_one(extractor_db.to_mongo()) + found = db["listeners"].find_one({"_id": new_extractor.inserted_id}) + removed = db["listeners"].delete_one({"_id": existing_extractor["_id"]}) extractor_out = EventListenerOut.from_mongo(found) print( "extractor updated: " @@ -38,8 +38,8 @@ def callback(ch, method, properties, body): ) return extractor_out else: - new_extractor = db["extractors"].insert_one(extractor_db.to_mongo()) - found = db["extractors"].find_one({"_id": new_extractor.inserted_id}) + new_extractor = db["listeners"].insert_one(extractor_db.to_mongo()) + found = db["listeners"].find_one({"_id": new_extractor.inserted_id}) extractor_out = EventListenerOut.from_mongo(found) print("new extractor registered: " + extractor_name) return extractor_out diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index 12263cc81..943a79d6d 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -76,7 +76,7 @@ async def _build_metadata_db_obj( extractor_info = metadata_in.extractor_info if extractor_info is not None: if ( - extractor := await db["extractors"].find_one( + extractor := await db["listeners"].find_one( {"name": extractor_info.name, "version": extractor_info.version} ) ) is not None: @@ -176,7 +176,7 @@ async def replace_file_metadata( 
     extractor_info = metadata_in.extractor_info
     if extractor_info is not None:
         if (
-            extractor := await db["extractors"].find_one(
+            extractor := await db["listeners"].find_one(
                 {"name": extractor_info.name, "version": extractor_info.version}
             )
         ) is not None:
@@ -267,7 +267,7 @@ async def update_file_metadata(
     extractor_info = metadata_in.extractor_info
     if extractor_info is not None:
         if (
-            extractor := await db["extractors"].find_one(
+            extractor := await db["listeners"].find_one(
                 {"name": extractor_info.name, "version": extractor_info.version}
            )
         ) is not None:
@@ -402,7 +402,7 @@ async def delete_file_metadata(
     extractor_info = metadata_in.extractor_info
     if extractor_info is not None:
         if (
-            extractor := await db["extractors"].find_one(
+            extractor := await db["listeners"].find_one(
                 {"name": extractor_info.name, "version": extractor_info.version}
             )
         ) is not None:

From 13f5dbb054f389c456e0d0113fb8e1fee5ab2347 Mon Sep 17 00:00:00 2001
From: Max Burnette
Date: Mon, 31 Oct 2022 10:04:24 -0500
Subject: [PATCH 34/34] remove renamed models file

---
 backend/app/models/extractors.py | 43 --------------------------------
 1 file changed, 43 deletions(-)
 delete mode 100644 backend/app/models/extractors.py

diff --git a/backend/app/models/extractors.py b/backend/app/models/extractors.py
deleted file mode 100644
index 1976bff36..000000000
--- a/backend/app/models/extractors.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from datetime import datetime
-from pydantic import Field
-from typing import Optional, List, Union
-from app.models.mongomodel import MongoModel
-
-
-class Repository(MongoModel):
-    repository_type: str = "git"
-    repository_url: str = ""
-
-
-class ExtractorIdentifier(MongoModel):
-    name: str
-    version: str = "1.0"
-    updated: datetime = Field(default_factory=datetime.utcnow)
-    author: str
-    contributors: List[str] = []
-    contexts: List[dict] = []
-    repository: Union[list[Repository], None] = None
-    external_services: List[str]
-    libraries: List[str] = []
-    bibtex: List[str]
-    maturity: str = "Development"
-    default_labels: List[str] = []
-    process: dict
-    categories: List[str] = []
-    parameters: dict = {}
-
-
-class ExtractorBase(ExtractorIdentifier):
-    description: str = ""
-
-
-class ExtractorIn(ExtractorBase):
-    pass
-
-
-class ExtractorDB(ExtractorBase):
-    pass
-
-
-class ExtractorOut(ExtractorDB):
-    pass