diff --git a/Examples/Sample.py b/Examples/Sample.py
index 3292780..e0a315f 100644
--- a/Examples/Sample.py
+++ b/Examples/Sample.py
@@ -22,22 +22,17 @@
# session_factory= dbconnection.createConnection('mssql', "(local)", "ODM2", "ODM", "odm")#win MSSQL
-
# session_factory= dbconnection.createConnection('mssql', "arroyoodm2", "", "ODM", "odm")#mac/linux MSSQL
# session_factory = dbconnection.createConnection('sqlite', '/Users/stephanie/DEV/YODA-Tools/tests/test_files/XL_specimen.sqlite', 2.0)
-
#_session = session_factory.getSession()
read = ReadODM2(session_factory)
create = CreateODM2(session_factory)
-
-
-
# Run some basic sample queries.
# ------------------------------
# Get all of the variables from the database and print their names to the console
diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py
index 7b6d5b2..1b7a049 100644
--- a/odm2api/ODM2/models.py
+++ b/odm2api/ODM2/models.py
@@ -5,7 +5,6 @@
from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case
from sqlalchemy.dialects import mysql, postgresql, sqlite
from sqlalchemy.orm import relationship
-
Base = modelBase.Base
BigIntegerType = BigInteger()
@@ -13,6 +12,9 @@
BigIntegerType = BigIntegerType.with_variant(postgresql.BIGINT(), 'postgresql')
BigIntegerType = BigIntegerType.with_variant(mysql.BIGINT(), 'mysql')
+DateTimeType = DateTime()
+DateTimeType = DateTimeType.with_variant(sqlite.INTEGER(), 'sqlite')
+
def is_hex(s):
try:
@@ -404,9 +406,9 @@ class Results(Base):
ProcessingLevelID = Column('processinglevelid', ForeignKey(ProcessingLevels.ProcessingLevelID),
nullable=False)
ResultDateTime = Column('resultdatetime', DateTime)
- ResultDateTimeUTCOffset = Column('resultdatetimeutcoffset', BigInteger)
+ ResultDateTimeUTCOffset = Column('resultdatetimeutcoffset', BigIntegerType)
ValidDateTime = Column('validdatetime', DateTime)
- ValidDateTimeUTCOffset = Column('validdatetimeutcoffset', BigInteger)
+ ValidDateTimeUTCOffset = Column('validdatetimeutcoffset', BigIntegerType)
StatusCV = Column('statuscv', ForeignKey(CVStatus.Name), index=True)
SampledMediumCV = Column('sampledmediumcv', ForeignKey(CVMediumType.Name), nullable=False, index=True)
ValueCount = Column('valuecount', Integer, nullable=False)
@@ -503,7 +505,7 @@ class InstrumentOutputVariables(Base):
class DataLoggerFileColumns(Base):
DataLoggerFileColumnID = Column('dataloggerfilecolumnid', Integer, primary_key=True, nullable=False)
- ResultID = Column('resultid', BigInteger, ForeignKey(Results.ResultID))
+ ResultID = Column('resultid', BigIntegerType, ForeignKey(Results.ResultID))
DataLoggerFileID = Column('dataloggerfileid', Integer,
ForeignKey(DataLoggerFiles.DataLoggerFileID), nullable=False)
InstrumentOutputVariableID = Column('instrumentoutputvariableid', Integer,
@@ -861,7 +863,7 @@ class ActionAnnotations(Base):
class EquipmentAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- EquipmentID = Column('valueid', BigInteger, ForeignKey(Equipment.EquipmentID), nullable=False)
+ EquipmentID = Column('valueid', BigIntegerType, ForeignKey(Equipment.EquipmentID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1640,7 +1642,7 @@ class CategoricalResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(CategoricalResults.ResultID), nullable=False)
DataValue = Column('datavalue', String(255), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
ResultObj = relationship(CategoricalResults)
@@ -1651,7 +1653,7 @@ class MeasurementResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(MeasurementResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
ResultObj = relationship(MeasurementResults)
@@ -1661,8 +1663,8 @@ class PointCoverageResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(PointCoverageResults.ResultID), nullable=False)
- DataValue = Column('datavalue', BigInteger, nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ DataValue = Column('datavalue', BigIntegerType, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
XLocation = Column('xlocation', Float(53), nullable=False)
XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False)
@@ -1687,7 +1689,7 @@ class ProfileResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(ProfileResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
ZLocation = Column('zlocation', Float(53), nullable=False)
ZAggregationInterval = Column('zaggregationinterval', Float(53), nullable=False)
@@ -1714,12 +1716,12 @@ class SectionResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(SectionResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', BigInteger, nullable=False)
- ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', BigInteger, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
+ ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
XLocation = Column('xlocation', Float(53), nullable=False)
XAggregationInterval = Column('xaggregationinterval', Float(53), nullable=False)
XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False)
- ZLocation = Column('zlocation', BigInteger, nullable=False)
+ ZLocation = Column('zlocation', BigIntegerType, nullable=False)
ZAggregationInterval = Column('zaggregationinterval', Float(53), nullable=False)
ZLocationUnitsID = Column('zlocationunitsid', ForeignKey(Units.UnitsID), nullable=False)
CensorCodeCV = Column('censorcodecv', ForeignKey(CVCensorCode.Name), nullable=False, index=True)
@@ -1750,7 +1752,7 @@ class SpectraResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(SpectraResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
ExcitationWavelength = Column('excitationwavelength', Float(53), nullable=False)
EmissionWavelength = Column('emmistionwavelength', Float(53), nullable=False)
@@ -1779,7 +1781,7 @@ class TimeSeriesResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(TimeSeriesResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
CensorCodeCV = Column('censorcodecv', ForeignKey(CVCensorCode.Name), nullable=False, index=True)
QualityCodeCV = Column('qualitycodecv', ForeignKey(CVQualityCode.Name), nullable=False, index=True)
@@ -1805,7 +1807,7 @@ class TrajectoryResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(TrajectoryResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
XLocation = Column('xlocation', Float(53), nullable=False)
XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False)
@@ -1850,8 +1852,8 @@ class TransectResultValues(Base):
ValueID = Column('valueid', BigIntegerType, primary_key=True)
ResultID = Column('resultid', ForeignKey(TransectResults.ResultID), nullable=False)
DataValue = Column('datavalue', Float(53), nullable=False)
- ValueDateTime = Column('valuedatetime', DateTime, nullable=False)
- ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', DateTime, nullable=False)
+ ValueDateTime = Column('valuedatetime', DateTimeType, nullable=False)
+ ValueDateTimeUTCOffset = Column('valuedatetimeutcoffset', Integer, nullable=False)
XLocation = Column('xlocation', Float(53), nullable=False)
XLocationUnitsID = Column('xlocationunitsid', ForeignKey(Units.UnitsID), nullable=False)
YLocation = Column('ylocation', Float(53), nullable=False)
@@ -1896,7 +1898,7 @@ class TransectResultValues(Base):
class CategoricalResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(CategoricalResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(CategoricalResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1906,7 +1908,7 @@ class CategoricalResultValueAnnotations(Base):
class MeasurementResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(MeasurementResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(MeasurementResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1916,7 +1918,7 @@ class MeasurementResultValueAnnotations(Base):
class PointCoverageResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(PointCoverageResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(PointCoverageResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1926,7 +1928,7 @@ class PointCoverageResultValueAnnotations(Base):
class ProfileResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(ProfileResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(ProfileResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1936,7 +1938,7 @@ class ProfileResultValueAnnotations(Base):
class SectionResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(SectionResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(SectionResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1946,7 +1948,7 @@ class SectionResultValueAnnotations(Base):
class SpectraResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(SpectraResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(SpectraResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1956,7 +1958,7 @@ class SpectraResultValueAnnotations(Base):
class TimeSeriesResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(TimeSeriesResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(TimeSeriesResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1966,7 +1968,7 @@ class TimeSeriesResultValueAnnotations(Base):
class TrajectoryResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(TrajectoryResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(TrajectoryResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
@@ -1976,7 +1978,7 @@ class TrajectoryResultValueAnnotations(Base):
class TransectResultValueAnnotations(Base):
BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False)
- ValueID = Column('valueid', BigInteger, ForeignKey(TransectResultValues.ValueID), nullable=False)
+ ValueID = Column('valueid', BigIntegerType, ForeignKey(TransectResultValues.ValueID), nullable=False)
AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False)
AnnotationObj = relationship(Annotations)
diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py
index 06252ad..78ba586 100644
--- a/odm2api/ODM2/services/readService.py
+++ b/odm2api/ODM2/services/readService.py
@@ -632,7 +632,8 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi
simulationid (int, optional): SimulationID.
sfid (int, optional): SamplingFeatureID.
variableid (int, optional): VariableID.
- siteid (int, optional): SiteID.
+ siteid (int, optional): SiteID. Goes through the related features table and finds all results
+ recorded at the given site.
Returns:
list: List of Result objects
@@ -686,23 +687,133 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi
return None
# Datasets
- def getDataSets(self, codes=None, uuids=None):
+ def getDataSets(self, ids=None, codes=None, uuids=None, dstype=None):
"""
- * Pass nothing - returns a list of all DataSet objects
- * Pass a list of DataSetCode - returns a single DataSet object for each code
- * Pass a list of UUIDS - returns a single DataSet object for each UUID
+ Retrieve a list of Datasets
+
+ Args:
+ ids (list, optional): List of DataSetsIDs.
+ codes (list, optional): List of DataSet Codes.
+ uuids (list, optional): List of Dataset UUIDs string.
+ dstype (str, optional): Type of Dataset from
+ `controlled vocabulary name <http://vocabulary.odm2.org/datasettype/>`_.
+
+
+ Returns:
+ list: List of DataSets Objects
+
+ Examples:
+ >>> READ = ReadODM2(SESSION_FACTORY)
+ >>> READ.getDataSets(ids=[39, 40])
+ >>> READ.getDataSets(codes=['HOME', 'FIELD'])
+ >>> READ.getDataSets(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202',
+ ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4'])
+ >>> READ.getDataSets(dstype='singleTimeSeries')
+
"""
q = self._session.query(DataSets)
+ if ids:
+ q = q.filter(DataSets.DataSetID.in_(ids))
if codes:
q = q.filter(DataSets.DataSetCode.in_(codes))
if uuids:
q.filter(DataSets.DataSetUUID.in_(uuids))
+ if dstype:
+ q = q.filter(DataSets.DataSetTypeCV == dstype)
try:
return q.all()
except Exception as e:
print('Error running Query {}'.format(e))
return None
+ # Datasets
+
+ def getDataSetsResults(self, ids=None, codes=None, uuids=None, dstype=None):
+ """
+ Retrieve a detailed list of Datasets along with detailed metadata about the datasets
+ and the results contained within them
+
+ **Must specify either DataSetID OR DataSetUUID OR DataSetCode**
+ Args:
+ ids (list, optional): List of DataSetsIDs.
+ codes (list, optional): List of DataSet Codes.
+ uuids (list, optional): List of Dataset UUIDs string.
+ dstype (str, optional): Type of Dataset from
+ `controlled vocabulary name <http://vocabulary.odm2.org/datasettype/>`_.
+
+
+ Returns:
+ list: List of DataSetsResults Objects
+
+ Examples:
+ >>> READ = ReadODM2(SESSION_FACTORY)
+ >>> READ.getDataSetsResults(ids=[39, 40])
+ >>> READ.getDataSetsResults(codes=['HOME', 'FIELD'])
+ >>> READ.getDataSetsResults(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202',
+ ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4'])
+ >>> READ.getDataSetsResults(dstype='singleTimeSeries')
+
+ """
+
+ # make sure one of the three arguments has been sent in
+ if all(v is None for v in [ids, codes, uuids]):
+ raise ValueError('Expected DataSetID OR DataSetUUID OR DataSetCode argument')
+
+ q = self._session.query(DataSetsResults)\
+ .join(DataSets)
+ if ids:
+ q = q.filter(DataSets.DataSetID.in_(ids))
+ if codes:
+ q = q.filter(DataSets.DataSetCode.in_(codes))
+ if uuids:
+ q = q.filter(DataSets.DataSetUUID.in_(uuids))
+ if dstype:
+ q = q.filter(DataSets.DataSetTypeCV == dstype)
+ try:
+ return q.all()
+ except Exception as e:
+ print('Error running Query {}'.format(e))
+ return None
+
+ def getDataSetsValues(self, ids=None, codes=None, uuids=None, dstype=None):
+ """
+ Retrieve a list of datavalues associated with the given dataset info
+
+ **Must specify either DataSetID OR DataSetUUID OR DataSetCode**
+ Args:
+ ids (list, optional): List of DataSetsIDs.
+ codes (list, optional): List of DataSet Codes.
+ uuids (list, optional): List of Dataset UUIDs string.
+ dstype (str, optional): Type of Dataset from
+ `controlled vocabulary name <http://vocabulary.odm2.org/datasettype/>`_.
+
+
+ Returns:
+ list: List of Result Values Objects
+
+ Examples:
+ >>> READ = ReadODM2(SESSION_FACTORY)
+ >>> READ.getDataSetsValues(ids=[39, 40])
+ >>> READ.getDataSetsValues(codes=['HOME', 'FIELD'])
+ >>> READ.getDataSetsValues(uuids=['a6f114f1-5416-4606-ae10-23be32dbc202',
+ ... '5396fdf3-ceb3-46b6-aaf9-454a37278bb4'])
+ >>> READ.getDataSetsValues(dstype='singleTimeSeries')
+
+ """
+
+ dsr = self.getDataSetsResults(ids, codes, uuids, dstype)
+
+ resids = []
+ for ds in dsr:
+ resids.append(ds.ResultID)
+
+ try:
+ return self.getResultValues(resultids=resids)
+ except Exception as e:
+ print('Error running Query {}'.format(e))
+ return None
+
+
def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=None):
"""
Retrieve a list of Datasets associated with the given sampling feature data.
@@ -735,7 +846,7 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No
if all(v is None for v in [ids, codes, uuids]):
raise ValueError('Expected samplingFeatureID OR samplingFeatureUUID OR samplingFeatureCode argument')
- sf_query = self._session.query(SamplingFeatures.SamplingFeatureID)
+ sf_query = self._session.query(SamplingFeatures)
if ids:
sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureID.in_(ids))
@@ -743,8 +854,9 @@ def getSamplingFeatureDatasets(self, ids=None, codes=None, uuids=None, dstype=No
sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureCode.in_(codes))
if uuids:
sf_query = sf_query.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids))
- sf_list = sf_query.all()
-
+ sf_list = []
+ for sf in sf_query.all():
+ sf_list.append(sf.SamplingFeatureID)
q = self._session.query(DataSetsResults)\
.join(Results)\
@@ -1062,7 +1174,7 @@ def getResultValues(self, resultids, starttime=None, endtime=None):
"""
type = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV
- ResultType = TimeSeriesResults
+ ResultType = TimeSeriesResultValues
if 'categorical' in type.lower():
ResultType = CategoricalResultValues
elif 'measurement' in type.lower():
diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py
index 6ea9154..a2ce719 100644
--- a/tests/test_odm2/test_readservice.py
+++ b/tests/test_odm2/test_readservice.py
@@ -92,6 +92,66 @@ def test_getSamplingFeatureByID(self):
resapi = self.reader.getSamplingFeatures(ids=[sfid])
assert resapi is not None
+#DataSets
+ def test_getDataSets(self):
+ # get all datasets from the database
+ ds = self.engine.execute('SELECT * FROM DataSets').fetchone()
+ dsid = ds[0]
+
+ dsapi = self.reader.getDataSets(ids=[dsid])
+ assert dsapi is not None
+ assert True
+
+ def test_getDataSetsResults(self):
+ # get all datasetresults from the database
+ dsr = self.engine.execute('SELECT * FROM DataSetsResults').fetchone()
+ dsid = dsr[2]
+
+ dsrapi = self.reader.getDataSetsResults(ids=[dsid])
+ assert dsrapi is not None
+ assert True
+
+ def test_getDataSetsValues(self):
+
+ dsr = self.engine.execute('SELECT * FROM DataSetsResults').fetchone()
+ dsid = dsr[2]
+
+ values = self.reader.getDataSetsValues(ids=[dsid])
+ assert values is not None
+ assert len(values) > 0
+
+
+
+ #ToDo figure out how to actually test this function
+ def test_getSamplingFeatureDataSets(self):
+
+ #find a sampling feature that is associated with a dataset
+ sf = self.engine.execute(
+ 'SELECT * from SamplingFeatures as sf '
+ 'inner join FeatureActions as fa on fa.SamplingFeatureID == sf.SamplingFeatureID '
+ 'inner join Results as r on fa.FeatureActionID == r.FeatureActionID '
+ 'inner join DataSetsResults as ds on r.ResultID == ds.ResultID '
+ ).fetchone()
+ assert len(sf) > 0
+
+ #get the dataset associated with the sampling feature
+ ds = self.engine.execute(
+ 'SELECT * from DataSetsResults as ds '
+ 'inner join Results as r on r.ResultID == ds.ResultID '
+ 'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID '
+ 'where fa.SamplingFeatureID = ' + str(sf[0])
+ ).fetchone()
+ assert len(ds) > 0
+
+ print(sf[0])
+ # get the dataset associated with the sampling feature using the api
+ dsapi = self.reader.getSamplingFeatureDatasets(ids=[sf[0]])
+
+ assert dsapi is not None
+ assert len(dsapi) > 0
+ assert ds[1] == dsapi[0].DataSetID
+
+
# Models
"""
TABLE Models
@@ -143,7 +203,7 @@ def test_getRelatedModelsByCode(self):
resapi = self.reader.getRelatedModels(code='swat')
assert resapi is not None
assert len(resapi) > 0
- print(resapi[0].ModelCode)
+ # print(resapi[0].ModelCode)
assert resapi[0].ModelCode == 'swat'
# test converter code that doesn't exist
resapi = self.reader.getRelatedModels(code='None')
@@ -178,7 +238,7 @@ def test_getRelatedModelsByCode(self):
def test_getAllResults(self):
# get all results from the database
res = self.engine.execute('SELECT * FROM Results').fetchall()
- print(res)
+ # print(res)
# get all results using the api
resapi = self.reader.getResults()
assert len(res) == len(resapi)
@@ -246,7 +306,7 @@ def test_getResultsBySimulationID(self):
).first()
assert len(res) > 0
res = rawSql2Alchemy(res, models.Results)
- print(res)
+ # print(res)
# get simulation by id using the api
# resapi = self.reader.getResultsBySimulationID(simulation.SimulationID)