diff --git a/tests/routers/data/counts_utf8_encoded.csv b/tests/routers/data/counts_utf8_encoded.csv new file mode 100644 index 000000000..ed263ef25 --- /dev/null +++ b/tests/routers/data/counts_utf8_encoded.csv @@ -0,0 +1,4 @@ +hgvs_nt,hgvs_pro,c_0,c_1 +c.1A>T,p.Thr1Ser,10,20 +c.2C>T,p.Thr1Met,8,8 +c.6T>A,p.Phe2Leu,90,2 \ No newline at end of file diff --git a/tests/routers/data/scores_utf8_encoded.csv b/tests/routers/data/scores_utf8_encoded.csv new file mode 100644 index 000000000..f96268444 --- /dev/null +++ b/tests/routers/data/scores_utf8_encoded.csv @@ -0,0 +1,4 @@ +hgvs_nt,hgvs_pro,score +c.1A>T,p.Thr1Ser,0.3 +c.2C>T,p.Thr1Met,0 +c.6T>A,p.Phe2Leu,-1.65 \ No newline at end of file diff --git a/tests/routers/test_score_set.py b/tests/routers/test_score_set.py index 44207f97d..0c6eef6db 100644 --- a/tests/routers/test_score_set.py +++ b/tests/routers/test_score_set.py @@ -336,6 +336,59 @@ def test_add_score_set_variants_scores_and_counts_endpoint(session, client, setu assert score_set == response_data +def test_add_score_set_variants_scores_only_endpoint_utf8_encoded(client, setup_router_db, data_files): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + scores_csv_path = data_files / "scores_utf8_encoded.csv" + with ( + open(scores_csv_path, "rb") as scores_file, + patch.object(ArqRedis, "enqueue_job", return_value=None) as queue, + ): + response = client.post( + f"/api/v1/score-sets/{score_set['urn']}/variants/data", + files={"scores_file": (scores_csv_path.name, scores_file, "text/csv")}, + ) + queue.assert_called_once() + + assert response.status_code == 200 + response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) + + # We test the worker process that actually adds the variant data separately. Here, we take it as + # fact that it would have succeeded.
+ score_set.update({"processingState": "processing"}) + assert score_set == response_data + + +def test_add_score_set_variants_scores_and_counts_endpoint_utf8_encoded(session, client, setup_router_db, data_files): + experiment = create_experiment(client) + score_set = create_seq_score_set(client, experiment["urn"]) + scores_csv_path = data_files / "scores_utf8_encoded.csv" + counts_csv_path = data_files / "counts_utf8_encoded.csv" + with ( + open(scores_csv_path, "rb") as scores_file, + open(counts_csv_path, "rb") as counts_file, + patch.object(ArqRedis, "enqueue_job", return_value=None) as queue, + ): + response = client.post( + f"/api/v1/score-sets/{score_set['urn']}/variants/data", + files={ + "scores_file": (scores_csv_path.name, scores_file, "text/csv"), + "counts_file": (counts_csv_path.name, counts_file, "text/csv"), + }, + ) + queue.assert_called_once() + + assert response.status_code == 200 + response_data = response.json() + jsonschema.validate(instance=response_data, schema=ScoreSet.schema()) + + # We test the worker process that actually adds the variant data separately. Here, we take it as + # fact that it would have succeeded. + score_set.update({"processingState": "processing"}) + assert score_set == response_data + + def test_cannot_add_scores_to_score_set_without_email(session, client, setup_router_db, data_files): experiment = create_experiment(client) score_set = create_seq_score_set(client, experiment["urn"])