diff --git a/.travis.yml b/.travis.yml index 23a6a8f..422e41d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,3 @@ -# use http://lint.travis-ci.org/ to check the file. - language: python sudo: required @@ -12,7 +10,6 @@ env: global: - secure: "ckOXS/SsEQGGI6XrdJbEhImPbMyB8HBIqGWJ7CkYKezKTTyuJ510LMAK/7xFLyMQ0rXIrHVpCZsGLsAuzK+h2D0/xQTvkLlxJkTkKwKnm6HKg6fOrbEVSAOzCKYAdP+nBpNLGA/kzMW26E+MSWuFQwc7JiKj36vW9uUgRiT8Knwl9YrfRG+Et1bWJIfbfNoBy2gP3eFXyLBKOcgR0E0y2dyst/uRwkkEn9m8as+PWc3zsVaFS6qf5L4OFMLI5z/EZiNXiu5B/AUvpesWT/OXsbpwnxShska3RXZIh+0exu8euop7Oi4o07PI7EasMtncWzjWKLB6hJaz/BRfS/fjH0/Isy64OUXCIaCwQew31e2uW/liYnlNY/PDJfiVYdbMR34YLLrstCdBjHG+l29eN0VrCKrhW9RDPVW5eKZD1EsQPUgEDpYHaigJBwuAguQ0/MMaZ7Z/DVOR4yUsNlKFO7VwdUL4+lQ3mHrdUAEhuZACncZJ03wouvtyiuGC2WPnPAvk97uHwUcJl7Mq/jTy7HrnTRytHVIHZ8LsymAaGL2ukjVLlJ8ex8/36v2glQEkPAT06f9JXkbWJRzutnDiFKli96shMM17qyl2rjjFdC+fVDm7L7xsgJDAgXAuhD9Wwj3iVo/fcz7SmcH+iEekRtgqwbzaT3MbmLRpS4tOarY=" - # Removing the directory will remove the env but leave the cached packages # at $HOME/miniconda/pkgs. That is a win-win because when re-creating the # env we will download only the new packages. @@ -67,7 +64,6 @@ before_install: - conda create --name TEST python=$TRAVIS_PYTHON_VERSION --file requirements.txt --file requirements-dev.txt - source activate TEST -# Test source distribution. install: - python setup.py sdist && version=$(python setup.py --version) && pushd dist && pip install odm2api-${version}.tar.gz && popd @@ -78,7 +74,7 @@ script: fi - if [[ $TEST_TARGET == 'coding_standards' ]]; then - find . -type f -name "*.py" ! -name 'conf.py' | xargs flake8 --max-line-length=100 ; + find . -type f -name "*.py" ! -name 'conf.py' ! -name '_version.py' ! 
-name 'versioneer.py' | xargs flake8 --max-line-length=110 ; fi - if [[ $TEST_TARGET == 'docs' ]]; then diff --git a/Examples/Sample 1.1.py b/Examples/Sample 1.1.py index 30113e4..03b986e 100644 --- a/Examples/Sample 1.1.py +++ b/Examples/Sample 1.1.py @@ -1,10 +1,13 @@ -__author__ = 'stephanie' +from __future__ import (absolute_import, division, print_function) + import sys import os from odm2api.ODMconnection import dbconnection import pprint from odm2api.ODM1_1_1.services import SeriesService +__author__ = 'stephanie' + this_file = os.path.realpath(__file__) directory = os.path.dirname(this_file) sys.path.insert(0, directory) @@ -92,6 +95,3 @@ pp.pprint(odm1service.get_values_by_series(ser[0].id)) print "The end" - - - diff --git a/Examples/Sample.py b/Examples/Sample.py index f2d2669..dcd8896 100644 --- a/Examples/Sample.py +++ b/Examples/Sample.py @@ -1,6 +1,6 @@ -__author__ = 'stephanie' - +from __future__ import (absolute_import, division, print_function) +__author__ = 'stephanie' #import matplotlib.pyplot as plt diff --git a/Forms/clsDBConfig.py b/Forms/clsDBConfig.py index 6561b5f..93556b8 100644 --- a/Forms/clsDBConfig.py +++ b/Forms/clsDBConfig.py @@ -1,4 +1,5 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- +from __future__ import (absolute_import, division, print_function) ########################################################################### ## Python code generated with wxFormBuilder (version Jun 5 2014) diff --git a/Forms/frmDBConfig.py b/Forms/frmDBConfig.py index 28ab020..133eb50 100644 --- a/Forms/frmDBConfig.py +++ b/Forms/frmDBConfig.py @@ -1,3 +1,5 @@ +from __future__ import (absolute_import, division, print_function) + """Subclass of clsDBConfiguration, which is generated by wxFormBuilder.""" import wx @@ -55,7 +57,7 @@ def OnBtnTest(self, event): def OnBtnSave(self, event): - + self.parent.EndModal(wx.ID_OK) diff --git a/odm2api/ODM2/__init__.py b/odm2api/ODM2/__init__.py index aff2b45..96c3005 100644 --- a/odm2api/ODM2/__init__.py 
+++ b/odm2api/ODM2/__init__.py @@ -1,7 +1,8 @@ -from odm2api.base import serviceBase -from odm2api.base import modelBase +from __future__ import (absolute_import, division, print_function) + +from odm2api.base import modelBase, serviceBase __all__ = [ 'serviceBase', 'modelBase', - ] +] diff --git a/odm2api/ODM2/models.py b/odm2api/ODM2/models.py index 91f8fe8..a6eb06b 100644 --- a/odm2api/ODM2/models.py +++ b/odm2api/ODM2/models.py @@ -1,10 +1,11 @@ -from sqlalchemy import BigInteger, Column, Date, DateTime, Float, ForeignKey, Integer, String, Boolean, case -from sqlalchemy.orm import relationship -from sqlalchemy.dialects import postgresql, mysql, sqlite - +from __future__ import (absolute_import, division, print_function) from odm2api.base import modelBase +from sqlalchemy import BigInteger, Boolean, Column, Date, DateTime, Float, ForeignKey, Integer, String, case +from sqlalchemy.dialects import mysql, postgresql, sqlite +from sqlalchemy.orm import relationship + Base = modelBase.Base BigIntegerType = BigInteger() @@ -179,7 +180,6 @@ class Organizations(Base): OrganizationObj = relationship(u'Organizations', remote_side=[OrganizationID]) - class Affiliations(Base): AffiliationID = Column('affiliationid', Integer, primary_key=True, nullable=False) @@ -212,7 +212,6 @@ class Methods(Base): OrganizationObj = relationship(Organizations) - class Actions(Base): """ Actions are performed by people and may have a result. @@ -230,7 +229,6 @@ class Actions(Base): MethodObj = relationship(Methods) - class ActionBy(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) @@ -274,19 +272,20 @@ class SamplingFeatures(Base): geometry data type. Can be a Point, Curve (profile, trajectory, etc), Surface (flat polygons, etc) or Solid/Volume (although often limited to 2D geometries). """ + FeatureGeometryWKT = Column('featuregeometrywkt', String(50)) """str: The location geometry of the sampling feature on the Earth expressed as well known text (WKT). 
Can be a Point, Curve (profile, trajectory, etc.), Surface (flat polygons, etc.), or Solid/Volume (although often limited to 2D geometries).""" __mapper_args__ = { - # 'polymorphic_on': SamplingFeatureTypeCV, - "polymorphic_on": case([ - (SamplingFeatureTypeCV == "Specimen", "Specimen"), - (SamplingFeatureTypeCV == "Site", "Site"), - ], else_="samplingfeatures"), - - 'polymorphic_identity':'samplingfeatures', + 'polymorphic_on': case( + [ + (SamplingFeatureTypeCV == 'Specimen', 'Specimen'), + (SamplingFeatureTypeCV == 'Site', 'Site'), + ], + else_='samplingfeatures'), + 'polymorphic_identity': 'samplingfeatures', } @@ -347,8 +346,12 @@ class TaxonomicClassifiers(Base): Terms for classifying results. """ TaxonomicClassifierID = Column('taxonomicclassifierid', Integer, primary_key=True, nullable=False) - TaxonomicClassifierTypeCV = Column('taxonomicclassifiertypecv', ForeignKey(CVTaxonomicClassifierType.Name), - nullable=False, index=True) + TaxonomicClassifierTypeCV = Column( + 'taxonomicclassifiertypecv', + ForeignKey(CVTaxonomicClassifierType.Name), + nullable=False, + index=True + ) TaxonomicClassifierName = Column('taxonomicclassifiername', String(255), nullable=False) TaxonomicClassifierCommonName = Column('taxonomicclassifiercommonname', String(255)) @@ -383,7 +386,6 @@ class Variables(Base): NoDataValue = Column('nodatavalue', Float(asdecimal=True), nullable=False) - class Results(Base): """ The result of an action. 
@@ -409,8 +411,6 @@ class Results(Base): SampledMediumCV = Column('sampledmediumcv', ForeignKey(CVMediumType.Name), nullable=False, index=True) ValueCount = Column('valuecount', Integer, nullable=False) - # IntendedObservationSpacing = Column(String(255)) - FeatureActionObj = relationship(FeatureActions) ProcessingLevelObj = relationship(ProcessingLevels) @@ -419,21 +419,19 @@ class Results(Base): VariableObj = relationship(Variables) __mapper_args__ = { - # 'polymorphic_on':ResultTypeCV, - "polymorphic_on":case([ - (ResultTypeCV == "Point coverage", "Point coverage"), - (ResultTypeCV == "Profile Coverage", "Profile Coverage"), - (ResultTypeCV == "Category coverage", "Category coverage"), - (ResultTypeCV == "Transect Coverage", "Transect Coverage"), - (ResultTypeCV == "Spectra coverage", "Spectra coverage"), - (ResultTypeCV == "Time series coverage", "Time series coverage"), - (ResultTypeCV == "Section coverage", "Section coverage"), - (ResultTypeCV == "Profile Coverage", "Profile Coverage"), - (ResultTypeCV == "Trajectory coverage", "Trajectory coverage"), - (ResultTypeCV == "Measurement", "Measurement"), - ], else_="results"), + 'polymorphic_on': case([ + (ResultTypeCV == 'Point coverage', 'Point coverage'), + (ResultTypeCV == 'Profile Coverage', 'Profile Coverage'), + (ResultTypeCV == 'Category coverage', 'Category coverage'), + (ResultTypeCV == 'Transect Coverage', 'Transect Coverage'), + (ResultTypeCV == 'Spectra coverage', 'Spectra coverage'), + (ResultTypeCV == 'Time series coverage', 'Time series coverage'), + (ResultTypeCV == 'Section coverage', 'Section coverage'), + (ResultTypeCV == 'Profile Coverage', 'Profile Coverage'), + (ResultTypeCV == 'Trajectory coverage', 'Trajectory coverage'), + (ResultTypeCV == 'Measurement', 'Measurement'), + ], else_='results'), 'polymorphic_identity': 'results', - # 'with_polymorphic':'*' } @@ -482,7 +480,12 @@ class EquipmentModels(Base): class InstrumentOutputVariables(Base): - InstrumentOutputVariableID = 
Column('instrumentoutputvariableid', Integer, primary_key=True, nullable=False) + InstrumentOutputVariableID = Column( + 'instrumentoutputvariableid', + Integer, + primary_key=True, + nullable=False + ) ModelID = Column('modelid', Integer, ForeignKey(EquipmentModels.ModelID), nullable=False) VariableID = Column('variableid', Integer, ForeignKey(Variables.VariableID), nullable=False) InstrumentMethodID = Column('instrumentmethodid', Integer, ForeignKey(Methods.MethodID), nullable=False) @@ -499,7 +502,6 @@ class InstrumentOutputVariables(Base): class DataLoggerFileColumns(Base): - DataLoggerFileColumnID = Column('dataloggerfilecolumnid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', BigInteger, ForeignKey(Results.ResultID)) DataLoggerFileID = Column('dataloggerfileid', Integer, @@ -514,19 +516,28 @@ class DataLoggerFileColumns(Base): ScanIntervalUnitsID = Column('scanintervalunitsid', Integer, ForeignKey(Units.UnitsID)) RecordingInterval = Column('recordinginterval', Float(50)) RecordingIntervalUnitsID = Column('recordingintervalunitsid', Integer, ForeignKey(Units.UnitsID)) - AggregationStatisticCV = Column('aggregationstatisticcv', String(255), ForeignKey(CVAggregationStatistic.Name), - index=True) + AggregationStatisticCV = Column( + 'aggregationstatisticcv', + String(255), + ForeignKey(CVAggregationStatistic.Name), + index=True + ) ResultObj = relationship(Results) DataLoggerFileObj = relationship(DataLoggerFiles) InstrumentOutputVariableObj = relationship(InstrumentOutputVariables) - ScanIntervalUnitsObj = relationship(Units, primaryjoin='DataLoggerFileColumns.ScanIntervalUnitsID == Units.UnitsID') - RecordingIntervalUnitsObj = relationship(Units, primaryjoin='DataLoggerFileColumns.RecordingIntervalUnitsID == Units.UnitsID') + ScanIntervalUnitsObj = relationship( + Units, + primaryjoin='DataLoggerFileColumns.ScanIntervalUnitsID == Units.UnitsID' + ) + RecordingIntervalUnitsObj = relationship( + Units, + 
primaryjoin='DataLoggerFileColumns.RecordingIntervalUnitsID == Units.UnitsID' + ) class Equipment(Base): - EquipmentID = Column('equipmentid', Integer, primary_key=True, nullable=False) EquipmentCode = Column('equipmentcode', String(50), nullable=False) EquipmentName = Column('equipmentname', String(255), nullable=False) @@ -539,18 +550,14 @@ class Equipment(Base): EquipmentPurchaseDate = Column('equipmentpurchasedate', DateTime, nullable=False) EquipmentPurchaseOrderNumber = Column('equipmentpurchaseordernumber', String(50)) EquipmentDescription = Column('equipmentdescription', String(500)) - # ParentEquipmentID = Column('parentequipmentid', ForeignKey('odm2.equipment.equipmentid')) PersonObj = relationship(People) OrganizationObj = relationship(Organizations) EquipmentModelObj = relationship(EquipmentModels) - # parent = relationship(u'Equipment', remote_side=[EquipmentID]) - class CalibrationReferenceEquipment(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) EquipmentID = Column('equipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=False) @@ -561,7 +568,6 @@ class CalibrationReferenceEquipment(Base): class EquipmentActions(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) EquipmentID = Column('equipmentid', ForeignKey(Equipment.EquipmentID), nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) @@ -572,7 +578,6 @@ class EquipmentActions(Base): class EquipmentUsed(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) EquipmentID = Column('equipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=False) @@ -583,7 +588,6 @@ class EquipmentUsed(Base): class MaintenanceActions(Base): - ActionID = Column('actionid', Integer, 
ForeignKey(Actions.ActionID), primary_key=True, nullable=False) IsFactoryService = Column('isfactoryservce', Boolean, nullable=False) MaintenanceCode = Column('maintenancecode', String(50)) @@ -597,19 +601,30 @@ class RelatedEquipment(Base): RelationID = Column('relationid', Integer, primary_key=True, nullable=True) EquipmentID = Column('equipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=True) RelationshipTypeCV = Column('relationshiptypecv', String(255), nullable=True, index=True) - RelatedEquipmentID = Column('relatedequipmentid', Integer, ForeignKey(Equipment.EquipmentID), nullable=True) + RelatedEquipmentID = Column( + 'relatedequipmentid', + Integer, + ForeignKey(Equipment.EquipmentID), + nullable=True + ) RelationshipStartDateTime = Column('relationshipstartdatetime', DateTime, nullable=True) RelationshipStartDateTimeUTCOffset = Column('relationshipstartdatetimeutcoffset', Integer, nullable=True) RelationshipEndDateTime = Column('relationshipenddatetime', DateTime) RelationshipEndDateTimeUTCOffset = Column('relationshipenddatetimeutcoffset', Integer) - EquipmentObj = relationship(Equipment, primaryjoin='RelatedEquipment.EquipmentID == Equipment.EquipmentID') - RelatedEquipmentObj = relationship(Equipment, primaryjoin='RelatedEquipment.RelatedEquipmentID == Equipment.EquipmentID') + EquipmentObj = relationship( + Equipment, + primaryjoin='RelatedEquipment.EquipmentID == Equipment.EquipmentID' + ) + RelatedEquipmentObj = relationship( + Equipment, + primaryjoin='RelatedEquipment.RelatedEquipmentID == Equipment.EquipmentID' + ) class CalibrationActions(Base): - ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID),primary_key=True, nullable=False) + ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), primary_key=True, nullable=False) CalibrationCheckValue = Column('calibrationcheckvalue', Float(53)) InstrumentOutputVariableID = Column('instrumentoutputvariableid', Integer, 
ForeignKey(InstrumentOutputVariables.VariableID), nullable=False) @@ -625,7 +640,6 @@ class CalibrationActions(Base): class Directives(Base): - DirectiveID = Column('directiveid', Integer, primary_key=True, nullable=False) DirectiveTypeCV = Column('directivetypecv', ForeignKey(CVDirectiveType.Name), nullable=False, index=True) DirectiveDescription = Column('directivedescription', String(500), nullable=False) @@ -633,7 +647,6 @@ class Directives(Base): class ActionDirectives(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) DirectiveID = Column('directiveid', ForeignKey(Directives.DirectiveID), nullable=False) @@ -643,12 +656,18 @@ class ActionDirectives(Base): class SpecimenBatchPositions(Base): - #todo fix misspelling + # todo fix misspelling __tablename__ = u'specimenbatchpostions' - FeatureActionID = Column('featureactionid', Integer, ForeignKey(FeatureActions.FeatureActionID), primary_key=True, nullable=False) - BatchPositionsNumber = Column('batchpositionnumber', Integer, nullable = False) - BatchPositionLabel =Column('batchpositionlabel', String(255)) + FeatureActionID = Column( + 'featureactionid', + Integer, + ForeignKey(FeatureActions.FeatureActionID), + primary_key=True, + nullable=False + ) + BatchPositionsNumber = Column('batchpositionnumber', Integer, nullable=False) + BatchPositionLabel = Column('batchpositionlabel', String(255)) FeatureActionObj = relationship(FeatureActions) @@ -658,7 +677,6 @@ class SpecimenBatchPositions(Base): # ################################################################################ class SpatialReferences(Base): - SpatialReferenceID = Column('spatialreferenceid', Integer, primary_key=True, nullable=False) SRSCode = Column('srscode', String(50)) SRSName = Column('srsname', String(255), nullable=False) @@ -668,23 +686,19 @@ class SpatialReferences(Base): class Specimens(SamplingFeatures): - SamplingFeatureID = 
Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), primary_key=True) SpecimenTypeCV = Column('specimentypecv', ForeignKey(CVSpecimenType.Name), nullable=False, index=True) SpecimenMediumCV = Column('specimenmediumcv', ForeignKey(CVMediumType.Name), nullable=False, index=True) IsFieldSpecimen = Column('isfieldspecimen', Boolean, nullable=False) - # SamplingFeatureObj = relationship(SamplingFeatures) __mapper_args__ = { - 'polymorphic_identity':'Specimen', + 'polymorphic_identity': 'Specimen', } - class SpatialOffsets(Base): - SpatialOffsetID = Column('spatialoffsetid', Integer, primary_key=True, nullable=False) SpatialOffsetTypeCV = Column('spatialoffsettypecv', ForeignKey(CVSpatialOffsetType.Name), nullable=False, index=True) @@ -711,33 +725,39 @@ class Sites(SamplingFeatures): Longitude = Column('longitude', Float(53), nullable=False) SpatialReferenceObj = relationship(SpatialReferences) - # SamplingFeatureObj = relationship(SamplingFeatures) __mapper_args__ = { - 'polymorphic_identity':'Site', + 'polymorphic_identity': 'Site', } -class RelatedFeatures(Base): +class RelatedFeatures(Base): RelationID = Column('relationid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, index=True) - RelatedFeatureID = Column('relatedfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) + RelatedFeatureID = Column( + 'relatedfeatureid', + ForeignKey(SamplingFeatures.SamplingFeatureID), + nullable=False + ) SpatialOffsetID = Column('spatialoffsetid', ForeignKey(SpatialOffsets.SpatialOffsetID)) - SamplingFeatureObj = relationship(SamplingFeatures, - primaryjoin='RelatedFeatures.SamplingFeatureID == SamplingFeatures.SamplingFeatureID') - RelatedFeatureObj = relationship(SamplingFeatures, - 
primaryjoin='RelatedFeatures.RelatedFeatureID == SamplingFeatures.SamplingFeatureID') + SamplingFeatureObj = relationship( + SamplingFeatures, + primaryjoin='RelatedFeatures.SamplingFeatureID == SamplingFeatures.SamplingFeatureID' + ) + RelatedFeatureObj = relationship( + SamplingFeatures, + primaryjoin='RelatedFeatures.RelatedFeatureID == SamplingFeatures.SamplingFeatureID' + ) SpatialOffsetObj = relationship(SpatialOffsets) class SpecimenTaxonomicClassifiers(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(Specimens.SamplingFeatureID), nullable=False) TaxonomicClassifierID = Column('taxonomicclassifierid', @@ -773,7 +793,6 @@ class RelatedModels(Base): class Simulations(Base): - SimulationID = Column('simulationid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) SimulationName = Column('simulationname', String(255), nullable=False) @@ -794,10 +813,9 @@ class Simulations(Base): Unit = relationship(Units) -# Part of the Provenance table, needed here to meet dependancies +# Part of the Provenance table, needed here to meet dependencies class Citations(Base): - CitationID = Column('citationid', Integer, primary_key=True, nullable=False) Title = Column('title', String(255), nullable=False) Publisher = Column('publisher', String(255), nullable=False) @@ -811,7 +829,12 @@ class Citations(Base): class Annotations(Base): AnnotationID = Column('annotationid', Integer, primary_key=True, nullable=False) - AnnotationTypeCV = Column('annotationtypecv', ForeignKey(CVAnnotationType.Name), nullable=False, index=True) + AnnotationTypeCV = Column( + 'annotationtypecv', + ForeignKey(CVAnnotationType.Name), + nullable=False, + index=True + ) AnnotationCode = Column('annotationcode', String(50)) AnnotationText = Column('annotationtext', String(500), nullable=False) AnnotationDateTime = Column('annotationdatetime', DateTime) 
@@ -827,7 +850,6 @@ class Annotations(Base): class ActionAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', ForeignKey(Actions.ActionID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -838,7 +860,6 @@ class ActionAnnotations(Base): class EquipmentAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) EquipmentID = Column('valueid', BigInteger, ForeignKey(Equipment.EquipmentID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -849,7 +870,6 @@ class EquipmentAnnotations(Base): class MethodAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) MethodID = Column('methodid', ForeignKey(Methods.MethodID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -860,7 +880,6 @@ class MethodAnnotations(Base): class ResultAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) AnnotationID = Column('annotationid', ForeignKey(Annotations.AnnotationID), nullable=False) @@ -873,7 +892,6 @@ class ResultAnnotations(Base): class SamplingFeatureAnnotations(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) @@ -888,7 +906,6 @@ class SamplingFeatureAnnotations(Base): # ################################################################################ class DataSetsResults(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) DataSetID = Column('datasetid', ForeignKey(DataSets.DataSetID), nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) 
@@ -899,7 +916,6 @@ class DataSetsResults(Base): class DataQuality(Base): - DataQualityID = Column('dataqualityid', Integer, primary_key=True, nullable=False) DataQualityTypeCV = Column('dataqualitytypecv', ForeignKey(CVDataQualityType.Name), nullable=False, index=True) @@ -914,10 +930,13 @@ class DataQuality(Base): class ReferenceMaterials(Base): - ReferenceMaterialID = Column('referencematerialid', Integer, primary_key=True, nullable=False) - ReferenceMaterialMediumCV = Column('referencematerialmediumcv', ForeignKey(CVReferenceMaterialMedium.Name), - nullable=False, index=True) + ReferenceMaterialMediumCV = Column( + 'referencematerialmediumcv', + ForeignKey(CVReferenceMaterialMedium.Name), + nullable=False, + index=True + ) ReferenceMaterialOrganizationID = Column('referencematerialoranizationid', ForeignKey(Organizations.OrganizationID), nullable=False) ReferenceMaterialCode = Column('referencematerialcode', String(50), nullable=False) @@ -935,8 +954,12 @@ class CalibrationStandards(Base): BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ActionID = Column('actionid', Integer, ForeignKey(Actions.ActionID), nullable=False) - ReferenceMaterialID = Column('referencematerialid', Integer, ForeignKey(ReferenceMaterials.ReferenceMaterialID), - nullable=False) + ReferenceMaterialID = Column( + 'referencematerialid', + Integer, + ForeignKey(ReferenceMaterials.ReferenceMaterialID), + nullable=False + ) ActionObj = relationship(Actions) ReferenceMaterialObj = relationship(ReferenceMaterials) @@ -1030,7 +1053,6 @@ class MethodExtensionPropertyValues(Base): class ResultExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1042,7 +1064,6 @@ class ResultExtensionPropertyValues(Base): class 
SamplingFeatureExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) SamplingFeatureID = Column('samplingfeatureid', ForeignKey(SamplingFeatures.SamplingFeatureID), nullable=False) @@ -1055,7 +1076,6 @@ class SamplingFeatureExtensionPropertyValues(Base): class VariableExtensionPropertyValues(Base): - BridgeID = Column('bridgeid', Integer, primary_key=True, nullable=False) VariableID = Column('variableid', ForeignKey(Variables.VariableID), nullable=False) PropertyID = Column('propertyid', ForeignKey(ExtensionProperties.PropertyID), nullable=False) @@ -1070,7 +1090,12 @@ class VariableExtensionPropertyValues(Base): # ################################################################################ class ExternalIdentifierSystems(Base): - ExternalIdentifierSystemID = Column('externalidentifiersystemid', Integer, primary_key=True, nullable=False) + ExternalIdentifierSystemID = Column( + 'externalidentifiersystemid', + Integer, + primary_key=True, + nullable=False + ) ExternalIdentifierSystemName = Column('externalidentifiersystemname', String(255), nullable=False) IdentifierSystemOrganizationID = Column('identifiersystemorganizationid', ForeignKey(Organizations.OrganizationID), nullable=False) @@ -1130,7 +1155,11 @@ class ReferenceMaterialExternalIdentifiers(Base): ExternalIdentifierSystemID = Column('externalidentifiersystemid', ForeignKey(ExternalIdentifierSystems.ExternalIdentifierSystemID), nullable=False) - ReferenceMaterialExternalIdentifier = Column('referencematerialexternalidentifier', String(255), nullable=False) + ReferenceMaterialExternalIdentifier = Column( + 'referencematerialexternalidentifier', + String(255), + nullable=False + ) ReferenceMaterialExternalIdentifierURI = Column('referencematerialexternalidentifieruri', String(255)) ExternalIdentifierSystemObj = relationship(ExternalIdentifierSystems) @@ -1145,7 +1174,11 @@ class SamplingFeatureExternalIdentifiers(Base): ExternalIdentifierSystemID = 
Column('externalidentifiersystemid', ForeignKey(ExternalIdentifierSystems.ExternalIdentifierSystemID), nullable=False) - SamplingFeatureExternalIdentifier = Column('samplingfeatureexternalidentifier', String(255), nullable=False) + SamplingFeatureExternalIdentifier = Column( + 'samplingfeatureexternalidentifier', + String(255), + nullable=False + ) SamplingFeatureExternalIdentifierURI = Column('samplingfeatureexternalidentifieruri', String(255)) ExternalIdentifierSystemObj = relationship(ExternalIdentifierSystems) @@ -1160,7 +1193,11 @@ class SpatialReferenceExternalIdentifiers(Base): ExternalIdentifierSystemID = Column('externalidentifiersystemid', ForeignKey(ExternalIdentifierSystems.ExternalIdentifierSystemID), nullable=False) - SpatialReferenceExternalIdentifier = Column('spatialreferenceexternalidentifier', String(255), nullable=False) + SpatialReferenceExternalIdentifier = Column( + 'spatialreferenceexternalidentifier', + String(255), + nullable=False + ) SpatialReferenceExternalIdentifierURI = Column('spatialreferenceexternalidentifieruri', String(255)) ExternalIdentifierSystemObj = relationship(ExternalIdentifierSystems) @@ -1175,7 +1212,11 @@ class TaxonomicClassifierExternalIdentifiers(Base): ExternalIdentifierSystemID = Column('externalidentifiersystemid', ForeignKey(ExternalIdentifierSystems.ExternalIdentifierSystemID), nullable=False) - TaxonomicClassifierExternalIdentifier = Column('taxonomicclassifierexternalidentifier', String(255), nullable=False) + TaxonomicClassifierExternalIdentifier = Column( + 'taxonomicclassifierexternalidentifier', + String(255), + nullable=False + ) TaxonomicClassifierExternalIdentifierURI = Column('taxonomicclassifierexternalidentifieruri', String(255)) ExternalIdentifierSystemObj = relationship(ExternalIdentifierSystems) @@ -1201,6 +1242,7 @@ class VariableExternalIdentifiers(Base): # ################################################################################ class AuthorLists(Base): + BridgeID = Column('bridgeid', 
Integer, primary_key=True, nullable=False) CitationID = Column('citationid', ForeignKey(Citations.CitationID), nullable=False) PersonID = Column('personid', ForeignKey(People.PersonID), nullable=False) @@ -1222,27 +1264,20 @@ class DataSetCitations(Base): DataSetObj = relationship(DataSets) -# ResultDerivationEquations = Table( -# u'resultderivationequations', Base.metadata, -# Column(u'resultid', ForeignKey(Results.ResultID), primary_key=True), -# Column(u'derivationequationid', ForeignKey('odm2.derivationequations.derivationequationid'), nullable=False), -# schema='odm2' -# ) - - class DerivationEquations(Base): DerivationEquationID = Column('derivationequationid', Integer, primary_key=True, nullable=False) DerivationEquation = Column('derivationequation', String(255), nullable=False) - #ResultsObj = relationship(Results, secondary=ResultDerivationEquations) - class ResultDerivationEquations(Base): ResultID = Column(u'resultid', ForeignKey(Results.ResultID), primary_key=True) - DerivationEquationID = Column(u'derivationequationid', ForeignKey(DerivationEquations.DerivationEquationID), - nullable=False) + DerivationEquationID = Column( + u'derivationequationid', + ForeignKey(DerivationEquations.DerivationEquationID), + nullable=False + ) ResultsObj = relationship(Results) DerivationEquationsObj = relationship(DerivationEquations) @@ -1260,7 +1295,6 @@ class MethodCitations(Base): MethodObj = relationship(Methods) -# from odm2.Annotations.converter import Annotation class RelatedAnnotations(Base): RelationID = Column('relationid', Integer, primary_key=True, nullable=False) @@ -1269,14 +1303,18 @@ class RelatedAnnotations(Base): index=True) RelatedAnnotationID = Column('relatedannotationid', ForeignKey(Annotations.AnnotationID), nullable=False) - AnnotationObj = relationship(Annotations, primaryjoin='RelatedAnnotations.AnnotationID == Annotations.AnnotationID') - RelatedAnnotationObj = relationship(Annotations, - primaryjoin='RelatedAnnotations.RelatedAnnotationID 
== Annotations.AnnotationID') + AnnotationObj = relationship( + Annotations, + primaryjoin='RelatedAnnotations.AnnotationID == Annotations.AnnotationID' + ) + RelatedAnnotationObj = relationship( + Annotations, + primaryjoin='RelatedAnnotations.RelatedAnnotationID == Annotations.AnnotationID' + ) class RelatedCitations(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) CitationID = Column('citationid', ForeignKey(Citations.CitationID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1284,13 +1322,14 @@ class RelatedCitations(Base): RelatedCitationID = Column('relatedcitationid', ForeignKey(Citations.CitationID), nullable=False) CitationObj = relationship(Citations, primaryjoin='RelatedCitations.CitationID == Citations.CitationID') - RelatedCitationObj = relationship(Citations, - primaryjoin='RelatedCitations.RelatedCitationID == Citations.CitationID') + RelatedCitationObj = relationship( + Citations, + primaryjoin='RelatedCitations.RelatedCitationID == Citations.CitationID' + ) class RelatedDataSets(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) DataSetID = Column('datasetid', ForeignKey(DataSets.DataSetID), nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1299,12 +1338,14 @@ class RelatedDataSets(Base): VersionCode = Column('versioncode', String(50)) DataSetObj = relationship(DataSets, primaryjoin='RelatedDataSets.DataSetID == DataSets.DataSetID') - RelatedDataSetObj = relationship(DataSets, primaryjoin='RelatedDataSets.RelatedDataSetID == DataSets.DataSetID') + RelatedDataSetObj = relationship( + DataSets, + primaryjoin='RelatedDataSets.RelatedDataSetID == DataSets.DataSetID' + ) class RelatedResults(Base): - RelationID = Column('relationid', Integer, primary_key=True, nullable=False) ResultID = Column('resultid', ForeignKey(Results.ResultID), 
nullable=False) RelationshipTypeCV = Column('relationshiptypecv', ForeignKey(CVRelationshipType.Name), nullable=False, @@ -1335,10 +1376,19 @@ class PointCoverageResults(Results): TimeAggregationInterval = Column('timeaggregationinterval', Float(53), nullable=False) TimeAggregationIntervalUnitsID = Column('timeaggregationintervalunitsid', Integer, nullable=False) - IntendedXSpacingUnitsObj = relationship(Units, primaryjoin='PointCoverageResults.IntendedXSpacingUnitsID == Units.UnitsID') - IntendedYSpacingUnitsObj = relationship(Units, primaryjoin='PointCoverageResults.IntendedYSpacingUnitsID == Units.UnitsID') + IntendedXSpacingUnitsObj = relationship( + Units, + primaryjoin='PointCoverageResults.IntendedXSpacingUnitsID == Units.UnitsID' + ) + IntendedYSpacingUnitsObj = relationship( + Units, + primaryjoin='PointCoverageResults.IntendedYSpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) - ZLocationUnitsObj = relationship(Units, primaryjoin='PointCoverageResults.ZLocationUnitsID == Units.UnitsID') + ZLocationUnitsObj = relationship( + Units, + primaryjoin='PointCoverageResults.ZLocationUnitsID == Units.UnitsID' + ) __mapper_args__ = {'polymorphic_identity': 'Point coverage'} @@ -1358,8 +1408,14 @@ class ProfileResults(Results): AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), nullable=False, index=True) - IntendedTimeSpacingUnitsObj = relationship(Units, primaryjoin='ProfileResults.IntendedTimeSpacingUnitsID == Units.UnitsID') - IntendedZSpacingUnitsObj = relationship(Units, primaryjoin='ProfileResults.IntendedZSpacingUnitsID == Units.UnitsID') + IntendedTimeSpacingUnitsObj = relationship( + Units, + primaryjoin='ProfileResults.IntendedTimeSpacingUnitsID == Units.UnitsID' + ) + IntendedZSpacingUnitsObj = relationship( + Units, + primaryjoin='ProfileResults.IntendedZSpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) XLocationUnitsObj = 
relationship(Units, primaryjoin='ProfileResults.XLocationUnitsID == Units.UnitsID') YLocationUnitsObj = relationship(Units, primaryjoin='ProfileResults.YLocationUnitsID == Units.UnitsID') @@ -1380,16 +1436,24 @@ class CategoricalResults(Results): QualityCodeCV = Column('qualitycodecv', ForeignKey(CVQualityCode.Name), nullable=False, index=True) SpatialReferenceObj = relationship(SpatialReferences) - XLocationUnitsObj = relationship(Units, primaryjoin='CategoricalResults.XLocationUnitsID == Units.UnitsID') - YLocationUnitsObj = relationship(Units, primaryjoin='CategoricalResults.YLocationUnitsID == Units.UnitsID') - ZLocationUnitsObj = relationship(Units, primaryjoin='CategoricalResults.ZLocationUnitsID == Units.UnitsID') - - __mapper_args__ = {'polymorphic_identity':' Category coverage'} + XLocationUnitsObj = relationship( + Units, + primaryjoin='CategoricalResults.XLocationUnitsID == Units.UnitsID' + ) + YLocationUnitsObj = relationship( + Units, + primaryjoin='CategoricalResults.YLocationUnitsID == Units.UnitsID' + ) + ZLocationUnitsObj = relationship( + Units, + primaryjoin='CategoricalResults.ZLocationUnitsID == Units.UnitsID' + ) + + __mapper_args__ = {'polymorphic_identity': ' Category coverage'} class TransectResults(Results): - ResultID = Column('resultid', ForeignKey(Results.ResultID), primary_key=True) ZLocation = Column('zlocation', Float(53)) ZLocationUnitsID = Column('zlocationunitsid', ForeignKey(Units.UnitsID)) @@ -1401,8 +1465,14 @@ class TransectResults(Results): AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), nullable=False, index=True) - IntendedTimeSpacingUnitsObj = relationship(Units, primaryjoin='TransectResults.IntendedTimeSpacingUnitsID == Units.UnitsID') - IntendedTransectSpacingUnitsObj = relationship(Units, primaryjoin='TransectResults.IntendedTransectSpacingUnitsID == Units.UnitsID') + IntendedTimeSpacingUnitsObj = relationship( + Units, + 
primaryjoin='TransectResults.IntendedTimeSpacingUnitsID == Units.UnitsID' + ) + IntendedTransectSpacingUnitsObj = relationship( + Units, + primaryjoin='TransectResults.IntendedTransectSpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) ZLocationUnitsObj = relationship(Units, primaryjoin='TransectResults.ZLocationUnitsID == Units.UnitsID') @@ -1424,13 +1494,16 @@ class SpectraResults(Results): AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), nullable=False, index=True) - IntendedWavelengthSpacingUnitsObj = relationship(Units, primaryjoin='SpectraResults.IntendedWavelengthSpacingUnitsID == Units.UnitsID') + IntendedWavelengthSpacingUnitsObj = relationship( + Units, + primaryjoin='SpectraResults.IntendedWavelengthSpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) XLocationUnitsObj = relationship(Units, primaryjoin='SpectraResults.XLocationUnitsID == Units.UnitsID') YLocationUnitsObj = relationship(Units, primaryjoin='SpectraResults.YLocationUnitsID == Units.UnitsID') ZLocationUnitsObj = relationship(Units, primaryjoin='SpectraResults.ZLocationUnitsID == Units.UnitsID') - __mapper_args__ = {'polymorphic_identity':'Spectra coverage'} + __mapper_args__ = {'polymorphic_identity': 'Spectra coverage'} class TimeSeriesResults(Results): @@ -1448,14 +1521,16 @@ class TimeSeriesResults(Results): AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), nullable=False, index=True) - IntendedTimeSpacingUnitsObj = relationship(Units, - primaryjoin='TimeSeriesResults.IntendedTimeSpacingUnitsID == Units.UnitsID') + IntendedTimeSpacingUnitsObj = relationship( + Units, + primaryjoin='TimeSeriesResults.IntendedTimeSpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) XLocationUnitsObj = relationship(Units, primaryjoin='TimeSeriesResults.XLocationUnitsID == Units.UnitsID') 
YLocationUnitsObj = relationship(Units, primaryjoin='TimeSeriesResults.YLocationUnitsID == Units.UnitsID') ZLocationUnitsObj = relationship(Units, primaryjoin='TimeSeriesResults.ZLocationUnitsID == Units.UnitsID') - __mapper_args__ = {'polymorphic_identity':'Time series coverage'} + __mapper_args__ = {'polymorphic_identity': 'Time series coverage'} class SectionResults(Results): @@ -1470,16 +1545,31 @@ class SectionResults(Results): IntendedZSpacingUnitsID = Column('intendedzspacingunitsid', ForeignKey(Units.UnitsID)) IntendedTimeSpacing = Column('intendedtimespacing', Float(53)) IntendedTimeSpacingUnitsID = Column('intendedtimespacingunitsid', ForeignKey(Units.UnitsID)) - AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), - nullable=False, index=True) - - IntendedTimeSpacingUnitsObj = relationship(Units, primaryjoin='SectionResults.IntendedTimeSpacingUnitsID == Units.UnitsID') - IntendedXSpacingUnitsObj = relationship(Units, primaryjoin='SectionResults.IntendedXSpacingUnitsID == Units.UnitsID') - IntendedZSpacingUnitsObj = relationship(Units, primaryjoin='SectionResults.IntendedZSpacingUnitsID == Units.UnitsID') + AggregationStatisticCV = Column( + 'aggregationstatisticcv', + ForeignKey(CVAggregationStatistic.Name), + nullable=False, + index=True + ) + + IntendedTimeSpacingUnitsObj = relationship( + Units, + primaryjoin='SectionResults.IntendedTimeSpacingUnitsID == Units.UnitsID' + ) + + IntendedXSpacingUnitsObj = relationship( + Units, + primaryjoin='SectionResults.IntendedXSpacingUnitsID == Units.UnitsID' + ) + + IntendedZSpacingUnitsObj = relationship( + Units, + primaryjoin='SectionResults.IntendedZSpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) YLocationUnitsObj = relationship(Units, primaryjoin='SectionResults.YLocationUnitsID == Units.UnitsID') - __mapper_args__ = {'polymorphic_identity':'Section coverage'} + __mapper_args__ = {'polymorphic_identity': 'Section 
coverage'} class TrajectoryResults(Results): @@ -1493,12 +1583,17 @@ class TrajectoryResults(Results): AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), nullable=False, index=True) - IntendedTimeSpacingUnitsObj = relationship(Units, primaryjoin='TrajectoryResults.IntendedTimeSpacingUnitsID == Units.UnitsID') - IntendedTrajectorySpacingUnitsObj = relationship(Units, - primaryjoin='TrajectoryResults.IntendedTrajectorySpacingUnitsID == Units.UnitsID') + IntendedTimeSpacingUnitsObj = relationship( + Units, + primaryjoin='TrajectoryResults.IntendedTimeSpacingUnitsID == Units.UnitsID' + ) + IntendedTrajectorySpacingUnitsObj = relationship( + Units, + primaryjoin='TrajectoryResults.IntendedTrajectorySpacingUnitsID == Units.UnitsID' + ) SpatialReferenceObj = relationship(SpatialReferences) - __mapper_args__ = {'polymorphic_identity':'Trajectory coverage'} + __mapper_args__ = {'polymorphic_identity': 'Trajectory coverage'} class MeasurementResults(Results): @@ -1520,12 +1615,24 @@ class MeasurementResults(Results): nullable=False) SpatialReferenceObj = relationship(SpatialReferences) - TimeAggregationIntervalUnitsObj = relationship(Units, primaryjoin='MeasurementResults.TimeAggregationIntervalUnitsID == Units.UnitsID') - XLocationUnitsObj = relationship(Units, primaryjoin='MeasurementResults.XLocationUnitsID == Units.UnitsID') - YLocationUnitsObj = relationship(Units, primaryjoin='MeasurementResults.YLocationUnitsID == Units.UnitsID') - ZLocationUnitsObj = relationship(Units, primaryjoin='MeasurementResults.ZLocationUnitsID == Units.UnitsID') - - __mapper_args__ = {'polymorphic_identity':'Measurement'} + TimeAggregationIntervalUnitsObj = relationship( + Units, + primaryjoin='MeasurementResults.TimeAggregationIntervalUnitsID == Units.UnitsID' + ) + XLocationUnitsObj = relationship( + Units, + primaryjoin='MeasurementResults.XLocationUnitsID == Units.UnitsID' + ) + YLocationUnitsObj = relationship( + Units, + 
primaryjoin='MeasurementResults.YLocationUnitsID == Units.UnitsID' + ) + ZLocationUnitsObj = relationship( + Units, + primaryjoin='MeasurementResults.ZLocationUnitsID == Units.UnitsID' + ) + + __mapper_args__ = {'polymorphic_identity': 'Measurement'} class CategoricalResultValues(Base): @@ -1550,7 +1657,6 @@ class MeasurementResultValues(Base): ResultObj = relationship(MeasurementResults) - class PointCoverageResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) @@ -1566,8 +1672,14 @@ class PointCoverageResultValues(Base): QualityCodeCV = Column('qualitycodecv', ForeignKey(CVQualityCode.Name), nullable=False, index=True) ResultObj = relationship(PointCoverageResults) - XLocationUnitsObj = relationship(Units, primaryjoin='PointCoverageResultValues.XLocationUnitsID == Units.UnitsID') - YLocationUnitsobj = relationship(Units, primaryjoin='PointCoverageResultValues.YLocationUnitsID == Units.UnitsID') + XLocationUnitsObj = relationship( + Units, + primaryjoin='PointCoverageResultValues.XLocationUnitsID == Units.UnitsID' + ) + YLocationUnitsobj = relationship( + Units, + primaryjoin='PointCoverageResultValues.YLocationUnitsID == Units.UnitsID' + ) class ProfileResultValues(Base): @@ -1587,8 +1699,14 @@ class ProfileResultValues(Base): nullable=False) ResultObj = relationship(ProfileResults) - TimeAggregationIntervalUnitsObj = relationship(Units, primaryjoin='ProfileResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID') - ZLocationUnitsObj = relationship(Units, primaryjoin='ProfileResultValues.ZLocationUnitsID == Units.UnitsID') + TimeAggregationIntervalUnitsObj = relationship( + Units, + primaryjoin='ProfileResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID' + ) + ZLocationUnitsObj = relationship( + Units, + primaryjoin='ProfileResultValues.ZLocationUnitsID == Units.UnitsID' + ) class SectionResultValues(Base): @@ -1613,9 +1731,18 @@ class SectionResultValues(Base): nullable=False) ResultObj = relationship(SectionResults) - 
TimeAggregationIntervalUnitsObj = relationship(Units, primaryjoin='SectionResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID') - XLocationUnitsObj = relationship(Units, primaryjoin='SectionResultValues.XLocationUnitsID == Units.UnitsID') - ZLocationUnitsObj = relationship(Units, primaryjoin='SectionResultValues.ZLocationUnitsID == Units.UnitsID') + TimeAggregationIntervalUnitsObj = relationship( + Units, + primaryjoin='SectionResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID' + ) + XLocationUnitsObj = relationship( + Units, + primaryjoin='SectionResultValues.XLocationUnitsID == Units.UnitsID' + ) + ZLocationUnitsObj = relationship( + Units, + primaryjoin='SectionResultValues.ZLocationUnitsID == Units.UnitsID' + ) class SpectraResultValues(Base): @@ -1635,12 +1762,18 @@ class SpectraResultValues(Base): nullable=False) ResultObj = relationship(SpectraResults) - TimeAggregationIntervalUnitsObj = relationship(Units, primaryjoin='SpectraResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID') - WavelengthUnitsObj = relationship(Units, primaryjoin='SpectraResultValues.WavelengthUnitsID == Units.UnitsID') + TimeAggregationIntervalUnitsObj = relationship( + Units, + primaryjoin='SpectraResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID' + ) + WavelengthUnitsObj = relationship( + Units, + primaryjoin='SpectraResultValues.WavelengthUnitsID == Units.UnitsID' + ) -class TimeSeriesResultValues(Base): +class TimeSeriesResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) ResultID = Column('resultid', ForeignKey(TimeSeriesResults.ResultID), nullable=False) @@ -1657,8 +1790,8 @@ class TimeSeriesResultValues(Base): TimeAggregationIntervalUnitsObj = relationship(Units) def get_columns(self): - return ["ValueID", "ResultID", "DataValue", "ValueDateTime", "ValueDateTimeUTCOffset", - "CensorCodeCV", "QualityCodeCV", "TimeAggregationInterval", "TimeAggregationIntervalUnitsID"] + return ['ValueID', 'ResultID', 'DataValue', 
'ValueDateTime', 'ValueDateTimeUTCOffset', + 'CensorCodeCV', 'QualityCodeCV', 'TimeAggregationInterval', 'TimeAggregationIntervalUnitsID'] def list_repr(self): return [self.ValueID, self.ResultID, self.DataValue, self.ValueDateTime, self.ValueDateTimeUTCOffset, @@ -1666,7 +1799,6 @@ def list_repr(self): self.TimeAggregationIntervalUnitsID] - class TrajectoryResultValues(Base): ValueID = Column('valueid', BigIntegerType, primary_key=True) @@ -1681,7 +1813,11 @@ class TrajectoryResultValues(Base): ZLocation = Column('zlocation', Float(53), nullable=False) ZLocationUnitsID = Column('zlocationunitsid', ForeignKey(Units.UnitsID), nullable=False) TrajectoryDistance = Column('trajectorydistance', Float(53), nullable=False) - TrajectoryDistanceAggregationInterval = Column('trajectorydistanceaggregationinterval', Float(53), nullable=False) + TrajectoryDistanceAggregationInterval = Column( + 'trajectorydistanceaggregationinterval', + Float(53), + nullable=False + ) TrajectoryDistanceUnitsID = Column('trajectorydistanceunitsid', Integer, nullable=False) CensorCodeCV = Column('censorcodecv', ForeignKey(CVCensorCode.Name), nullable=False, index=True) QualityCodeCV = Column('qualitycodecv', ForeignKey(CVQualityCode.Name), nullable=False, index=True) @@ -1690,11 +1826,22 @@ class TrajectoryResultValues(Base): nullable=False) ResultObj = relationship(TrajectoryResults) - TimeAggregationIntervalUnitsObj = relationship(Units, - primaryjoin='TrajectoryResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID') - XLocationUnitsObj = relationship(Units, primaryjoin='TrajectoryResultValues.XLocationUnitsID == Units.UnitsID') - YLocationUnitsObj = relationship(Units, primaryjoin='TrajectoryResultValues.YLocationUnitsID == Units.UnitsID') - ZLocationUnitsObj = relationship(Units, primaryjoin='TrajectoryResultValues.ZLocationUnitsID == Units.UnitsID') + TimeAggregationIntervalUnitsObj = relationship( + Units, + primaryjoin='TrajectoryResultValues.TimeAggregationIntervalUnitsID == 
Units.UnitsID' + ) + XLocationUnitsObj = relationship( + Units, + primaryjoin='TrajectoryResultValues.XLocationUnitsID == Units.UnitsID' + ) + YLocationUnitsObj = relationship( + Units, + primaryjoin='TrajectoryResultValues.YLocationUnitsID == Units.UnitsID' + ) + ZLocationUnitsObj = relationship( + Units, + primaryjoin='TrajectoryResultValues.ZLocationUnitsID == Units.UnitsID' + ) class TransectResultValues(Base): @@ -1709,21 +1856,40 @@ class TransectResultValues(Base): YLocation = Column('ylocation', Float(53), nullable=False) YLocationUnitsID = Column('ylocationunitsid', ForeignKey(Units.UnitsID), nullable=False) TransectDistance = Column('transectdistance', Float(53), nullable=False) - TransectDistanceAggregationInterval = Column('transectdistanceaggregationinterval', Float(53), nullable=False) + TransectDistanceAggregationInterval = Column( + 'transectdistanceaggregationinterval', + Float(53), + nullable=False + ) TransectDistanceUnitsID = Column('transectdistanceunitsid', ForeignKey(Units.UnitsID), nullable=False) CensorCodeCV = Column('censorcodecv', ForeignKey(CVCensorCode.Name), nullable=False, index=True) QualityCodeCV = Column('qualitycodecv', ForeignKey(CVQualityCode.Name), nullable=False, index=True) AggregationStatisticCV = Column('aggregationstatisticcv', ForeignKey(CVAggregationStatistic.Name), nullable=False, index=True) TimeAggregationInterval = Column('timeaggregationinterval', Float(53), nullable=False) - TimeAggregationIntervalUnitsID = Column('timeaggregationintervalunitsid', ForeignKey(Units.UnitsID), nullable=False) + TimeAggregationIntervalUnitsID = Column( + 'timeaggregationintervalunitsid', + ForeignKey(Units.UnitsID), + nullable=False + ) ResultObj = relationship(TransectResults) - TimeAggregationIntervalUnitsObj = relationship(Units, - primaryjoin='TransectResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID') - XLocationUnitsObj = relationship(Units, primaryjoin='TransectResultValues.XLocationUnitsID == Units.UnitsID') - 
YLocationUnitsObj = relationship(Units, primaryjoin='TransectResultValues.YLocationUnitsID == Units.UnitsID') - TransectDistanceUnitsObj = relationship(Units, primaryjoin='TransectResultValues.TransectDistanceUnitsID == Units.UnitsID') + TimeAggregationIntervalUnitsObj = relationship( + Units, + primaryjoin='TransectResultValues.TimeAggregationIntervalUnitsID == Units.UnitsID' + ) + XLocationUnitsObj = relationship( + Units, + primaryjoin='TransectResultValues.XLocationUnitsID == Units.UnitsID' + ) + YLocationUnitsObj = relationship( + Units, + primaryjoin='TransectResultValues.YLocationUnitsID == Units.UnitsID' + ) + TransectDistanceUnitsObj = relationship( + Units, + primaryjoin='TransectResultValues.TransectDistanceUnitsID == Units.UnitsID' + ) class CategoricalResultValueAnnotations(Base): @@ -1831,7 +1997,7 @@ def _changeSchema(schema): if Tbl.__table__.schema == schema: return Tbl.__table__.schema = schema - Tbl.__table_args__["schema"]= schema + Tbl.__table_args__['schema'] = schema def _getSchema(engine): @@ -1848,5 +2014,3 @@ def _getSchema(engine): def setSchema(engine): s = _getSchema(engine) _changeSchema(s) - - diff --git a/odm2api/ODM2/services/__init__.py b/odm2api/ODM2/services/__init__.py index 1364652..ecdd49a 100644 --- a/odm2api/ODM2/services/__init__.py +++ b/odm2api/ODM2/services/__init__.py @@ -1,46 +1,15 @@ -__author__ = 'jmeline' -''' -from createService import createAnnotations, createCore, createCV, createDataQuality, createEquipment, \ - createExtensionProperties, createExternalIdentifiers, createLabAnalyses, createODM2, createProvenance, \ - createResults, createSamplingFeatures, createSensors, createSimulation - -from deleteService import deleteAnnotations, deleteCore, deleteCV, deleteDataQuality, deleteEquipment, \ - deleteExtensionProperties, deleteExternalIdentifiers, deleteLabAnalyses, deleteODM2, deleteProvenance, \ - deleteResults, deleteSamplingFeatures, deleteSensors - -from readService import readAnnotations, readCore, readCV, 
readDataQuality, readEquipment, readExtensionProperties, \ - readExternalIdentifiers, readODM2, readLabAnalyses, readProvenance, readResults, readSamplingFeatures, \ - readSensors, readSimulation - -from updateService import updateAnnotations, updateCore, updateCV, updateDataQuality, updateEquipment, \ - updateExtensionProperties, updateExternalIdentifiers, updateLabAnalyses, updateODM2, updateProvenance, \ - updateResults, updateSamplingFeatures, updateSensors - -__all__ = [ - # Create - 'createAnnotations', 'createCore', 'createCV', 'createDataQuality', 'createEquipment', 'createExtensionProperties', - 'createExternalIdentifiers', 'createLabAnalyses', 'createODM2', 'createProvenance', 'createResults', - 'createSamplingFeatures', 'createSensors', 'createSimulation', - - # Delete - 'deleteSensors', 'deleteAnnotations', 'deleteCore', 'deleteCV', 'deleteDataQuality', 'deleteEquipment', - 'deleteExtensionProperties', 'deleteExternalIdentifiers', 'deleteLabAnalyses', 'deleteODM2', 'deleteProvenance', - 'deleteResults', 'deleteSamplingFeatures', +from __future__ import (absolute_import, division, print_function) - # Read - 'readAnnotations', 'readCore', 'readCV', 'readDataQuality', 'readEquipment', 'readExtensionProperties', - 'readExternalIdentifiers', 'readLabAnalyses', 'readODM2', 'readProvenance', 'readResults', 'readSamplingFeatures', - 'readSensors', 'readSimulation', - - # Update - 'updateAnnotations', 'updateSensors', 'updateSamplingFeatures', 'updateResults', 'updateProvenance', 'updateODM2', - 'updateLabAnalyses', 'updateExternalIdentifiers', 'updateCore', 'updateCV', 'updateDataQuality', 'updateEquipment', - 'updateExtensionProperties' -] -''' from odm2api.ODM2.services.createService import CreateODM2 from odm2api.ODM2.services.deleteService import DeleteODM2 from odm2api.ODM2.services.readService import ReadODM2 from odm2api.ODM2.services.updateService import UpdateODM2 -__all__= ['CreateODM2', 'DeleteODM2', 'ReadODM2', 'UpdateODM2' ] \ No newline at end of 
file +__author__ = 'jmeline' + +__all__ = [ + 'CreateODM2', + 'DeleteODM2', + 'ReadODM2', + 'UpdateODM2' +] diff --git a/odm2api/ODM2/services/createService.py b/odm2api/ODM2/services/createService.py index 4ce6636..1fceb2f 100644 --- a/odm2api/ODM2/services/createService.py +++ b/odm2api/ODM2/services/createService.py @@ -1,21 +1,15 @@ -__author__ = 'sreeder' +from __future__ import (absolute_import, division, print_function) -import datetime as dt import uuid -# from src.api.ODM2.LikeODM1.converter import Site -from odm2api.ODM2.models import * from odm2api.ODM2 import serviceBase +from odm2api.ODM2.models import TimeSeriesResultValues + +__author__ = 'sreeder' class CreateODM2(serviceBase): - ''' - def __init__(self, session): - self._session = session - ''' - # ################################################################################ # Annotations - # ################################################################################ def create(self, value): self._session.add(value) @@ -27,7 +21,6 @@ def createAll(self, values): self._session.commit() return values - def createVariable(self, var): self._session.add(var) self._session.commit() @@ -37,22 +30,18 @@ def createVariable(self, var): def createMethod(self, method): self._session.add(method) self._session.commit() - return method def createProcessingLevel(self, proclevel): self._session.add(proclevel) self._session.commit() - return proclevel - #send in any type of sampling feature def createSamplingFeature(self, samplingfeature): if samplingfeature.SamplingFeatureUUID is None: samplingfeature.SamplingFeatureUUID = str(uuid.uuid1()) self._session.add(samplingfeature) self._session.commit() - return samplingfeature def createUnit(self, unit): @@ -68,25 +57,21 @@ def createOrganization(self, org): def createPerson(self, person): self._session.add(person) self._session.commit() - return person def createAffiliation(self, affiliation): self._session.add(affiliation) self._session.commit() - return 
affiliation def createDataset(self, dataset): self._session.add(dataset) self._session.commit() - return dataset def createDatasetResults(self, datasetresult): self._session.add(datasetresult) self._session.commit() - return datasetresult def createAction(self, action): @@ -108,16 +93,13 @@ def createFeatureAction(self, action): def createAnnotations(self, anno): self._session.add(anno) self._session.commit() - return anno - + def createRelatedAction(self, relatedaction): self._session.add(relatedaction) self._session.commit() - return relatedaction - #send in any type of result object def createResult(self, result): if result.ResultUUID is None: result.ResultUUID = str(uuid.uuid1()) @@ -125,19 +107,15 @@ def createResult(self, result): self._session.commit() return result - - def createResultValue(self, value): self._session.add(value) self._session.commit() self._session.flush() return value - def createSpatialReference(self, spatialref): self._session.add(spatialref) self._session.commit() - return spatialref def createModel(self, model): @@ -149,112 +127,28 @@ def createModel(self, model): def createRelatedModel(self, relatedmodel): self._session.add(relatedmodel) self._session.commit() - return relatedmodel def createSimulation(self, simulation): self._session.add(simulation) self._session.commit() - return simulation def createTimeSeriesResultValues(self, datavalues): try: - tablename = TimeSeriesResultValues.__tablename__ - #print ("I am TS saving name the table name", tablename) - datavalues.to_sql(name="TimeSeriesResultValues", - schema=TimeSeriesResultValues.__table_args__['schema'], - if_exists='append', - chunksize=1000, - con=self._session_factory.engine, - index=False) + # FIXME: F841 local variable 'tablename' is assigned to but never used.
+ # tablename = TimeSeriesResultValues.__tablename__ + datavalues.to_sql( + name='TimeSeriesResultValues', + schema=TimeSeriesResultValues.__table_args__['schema'], + if_exists='append', + chunksize=1000, + con=self._session_factory.engine, + index=False + ) self._session.commit() return datavalues except Exception as e: print(e) return None - - -# def createTimeSeriesResultValues(self, resultid, datavalues, datetimes, datetimeoffsets, censorcodecv, -# qualitycodecv, -# timeaggregationinterval, timeaggregationunit): -# -# -# try: -# values = TimeSeriesResultValues() -# for i in range(len(datavalues)): -# values.ResultID = resultid -# values.CensorCodeCV = censorcodecv -# values.QualityCodeCV = qualitycodecv -# values.TimeAggregationInterval = timeaggregationinterval -# values.TimeAggregationIntervalUnitsID = timeaggregationunit -# values.DataValue = datavalues[i] -# values.ValueDateTime = datetimes[i] -# values.ValueDateTimeUTCOffset = datetimeoffsets[i] -# self._session.add(values) -# self._session.commit() -# return values -# except Exception, e: -# print e -# return None -# ''' -# -# def createTimeSeriesResultValues(self, datavalues): -# try: -# #using Pandas built-in --slow -# #changing way values sent --unknown error on insert -# #cols = datavalues.columns.tolist() -# #['ValueDateTime', 'DataValue', 'TimeAggregationInterval', 'TimeAggregationIntervalUnitsID', 'QualityCodeCV', 'CensorCodeCV', 'ResultID', 'ValueDateTimeUTCOffset'] -# #cols = ['ResultID','DataValue','ValueDateTime','ValueDateTimeUTCOffset','CensorCodeCV','QualityCodeCV','TimeAggregationInterval','TimeAggregationIntervalUnitsID'] -# #datavalues = datavalues[cols] -# #print datavalues -# #datavalues.to_sql(name=TimeSeriesResultValues.__tablename__, -# datavalues.to_sql(name="TimeSeriesResultValues", -# schema=TimeSeriesResultValues.__table_args__['schema'], -# if_exists='append', -# chunksize= 1000, -# con=self._session_factory.engine, -# index=False) -# self._session.commit() -# -# -# #using 
sqlalchemy core --sending empty parameters -# # data = datavalues.to_dict('records') -# # self._session.execute(TimeSeriesResultValues.__table__.insert(data)) -# -# #using cursor and StringIO --not all cursors have the copy_from function -# # print "using cursor" -# # import cStringIO -# # #stream the data using 'to_csv' and StringIO(); then use sql's 'copy_from' function -# # output = cStringIO.StringIO() -# # #ignore the index -# # datavalues.to_csv(output, sep='\t', header=False, index=False) -# # #jump to start of stream -# # output.seek(0) -# # contents = output.getvalue() -# # connection = self._session_factory.engine.raw_connection() -# # cur = connection.cursor() -# # #null values become '' -# # cur.copy_from(output, 'ODM2.TimeSeriesResultValues', null="") -# # connection.commit() -# # cur.close() -# -# #using Bulk Insert * user must have permissions --file created locally code running remote -# # datavalues.to_csv('C:\\Users\\Stephanie\\temp.csv') -# # sql = """ -# # BULK INSERT ODM2.TimeSeriesResultValues -# # FROM 'C:\\Users\\Stephanie\\temp.csv' WITH ( -# # FIELDTERMINATOR=',', -# # ROWTERMINATOR='\\n'); -# # """ -# # self._session.execute(sql) -# -# -# -# -# return datavalues -# except Exception, e: -# print e -# return None -# \ No newline at end of file diff --git a/odm2api/ODM2/services/deleteService.py b/odm2api/ODM2/services/deleteService.py index c8a0bcd..43f944a 100644 --- a/odm2api/ODM2/services/deleteService.py +++ b/odm2api/ODM2/services/deleteService.py @@ -1,108 +1,42 @@ -__author__ = 'jmeline' +from __future__ import (absolute_import, division, print_function) from odm2api.ODM2 import serviceBase -from odm2api.ODM2.models import * +from odm2api.ODM2.models import TimeSeriesResultValues + + +__author__ = 'jmeline' -# ################################################################################ # Annotations -# ################################################################################ + class DeleteODM2(serviceBase): def 
remove(self, obj): self._session.delete(obj) -# ################################################################################ # CV -# ################################################################################ - - - - - -# ################################################################################ # Core -# ################################################################################ - - - - -# ################################################################################ # Data Quality -# ################################################################################ - - - -# ################################################################################ # Equipment -# ################################################################################ - - - - -# ################################################################################ # Extension Properties -# ################################################################################ - - - - -# ################################################################################ # External Identifiers -# ################################################################################ - - - - -# ################################################################################ # Lab Analyses -# ################################################################################ - - - - -# ################################################################################ # Provenance -# ################################################################################ - - - - -# ################################################################################ # Annotations -# ################################################################################ - - - - -# ################################################################################ # Sampling Features -# ################################################################################ - - - - -# 
################################################################################ # Sensors -# ################################################################################ - - - - -# ################################################################################ # Result Values -# ################################################################################ - def deleteTSRValues(self, ids=None, startdate = None, dates=None): + def deleteTSRValues(self, ids=None, startdate=None, dates=None): q = self._session.query(TimeSeriesResultValues) if ids: q = q.filter(TimeSeriesResultValues.ResultID.in_(ids)) if startdate: - #delete all values on or after the startdate + # delete all values on or after the startdate. q = q.filter(TimeSeriesResultValues.ValueDateTime >= startdate) if dates: q = q.filter(TimeSeriesResultValues.ValueDateTime.in_(dates)) numvals = q.count() q.delete(False) - return numvals \ No newline at end of file + return numvals diff --git a/odm2api/ODM2/services/readService.py b/odm2api/ODM2/services/readService.py index f9ac736..45de516 100644 --- a/odm2api/ODM2/services/readService.py +++ b/odm2api/ODM2/services/readService.py @@ -1,10 +1,44 @@ -__author__ = 'sreeder' +from __future__ import (absolute_import, division, print_function) + +from odm2api.ODM2 import serviceBase +from odm2api.ODM2.models import ( + ActionAnnotations, ActionDirectives, ActionExtensionPropertyValues, Actions, + Affiliations, Annotations, AuthorLists, CVActionType, CVAggregationStatistic, + CVAnnotationType, CVCensorCode, CVDataQualityType, CVDataSetType, CVDirectiveType, + CVElevationDatum, CVEquipmentType, CVMediumType, CVMethodType, CVOrganizationType, + CVPropertyDataType, CVQualityCode, CVRelationshipType, CVResultType, CVSamplingFeatureGeoType, + CVSamplingFeatureType, CVSiteType, CVSpatialOffsetType, CVSpeciation, CVSpecimenType, + CVStatus, CVTaxonomicClassifierType, CVUnitsType, CVVariableName, CVVariableType, + CalibrationActions, CalibrationReferenceEquipment, 
CalibrationStandards, + CategoricalResultValueAnnotations, CategoricalResultValues, CitationExtensionPropertyValues, + CitationExternalIdentifiers, DataLoggerFileColumns, DataLoggerFiles, DataLoggerProgramFiles, + DataQuality, DataSetCitations, DataSets, DerivationEquations, Directives, Equipment, + EquipmentActions, EquipmentAnnotations, EquipmentModels, EquipmentUsed, ExtensionProperties, + ExternalIdentifierSystems, FeatureActions, InstrumentOutputVariables, MaintenanceActions, + MeasurementResultValueAnnotations, MeasurementResultValues, MethodAnnotations, + MethodCitations, MethodExtensionPropertyValues, MethodExternalIdentifiers, + Methods, Models, Organizations, People, PersonExternalIdentifiers, + PointCoverageResultValueAnnotations, PointCoverageResultValues, ProcessingLevels, + ProfileResultValueAnnotations, ProfileResultValues, ReferenceMaterialExternalIdentifiers, + ReferenceMaterialValues, ReferenceMaterials, RelatedActions, RelatedAnnotations, + RelatedCitations, RelatedDataSets, RelatedEquipment, RelatedFeatures, RelatedModels, + RelatedResults, ResultAnnotations, ResultDerivationEquations, ResultExtensionPropertyValues, + ResultNormalizationValues, Results, ResultsDataQuality, SamplingFeatureAnnotations, + SamplingFeatureExtensionPropertyValues, SamplingFeatureExternalIdentifiers, + SamplingFeatures, SectionResultValueAnnotations, SectionResults, Simulations, + SpatialReferenceExternalIdentifiers, SpatialReferences, SpecimenBatchPositions, + SpectraResultValueAnnotations, SpectraResultValues, TaxonomicClassifierExternalIdentifiers, + TaxonomicClassifiers, TimeSeriesResultValueAnnotations, TimeSeriesResultValues, + TimeSeriesResults, TrajectoryResultValueAnnotations, TrajectoryResultValues, + TransectResultValueAnnotations, TransectResultValues, Units, VariableExtensionPropertyValues, + VariableExternalIdentifiers, Variables, +) -from sqlalchemy import func ,not_, bindparam, distinct, exists import pandas as pd -from odm2api.ODM2 import serviceBase 
-from odm2api.ODM2.models import * +from sqlalchemy import distinct, exists + +__author__ = 'sreeder' class DetailedResult: @@ -15,13 +49,13 @@ def __init__(self, action, result, unit): # result.result_id etc. self.ResultID = result.ResultID - self.SamplingFeatureCode = sc#.SamplingFeatureCode + self.SamplingFeatureCode = sc self.MethodCode = method.MethodCode self.VariableCode = variable.VariableCode self.ProcessingLevelCode = processingLevel.ProcessingLevelCode self.UnitsName = unit.UnitsName - self.SamplingFeatureName = sn#.SamplingFeatureName + self.SamplingFeatureName = sn self.MethodName = method.MethodName self.VariableNameCV = variable.VariableNameCV self.ProcessingLevelDefinition = processingLevel.Definition @@ -34,32 +68,18 @@ def __init__(self, action, result, class DetailedAffiliation: def __init__(self, affiliation, person, org): self.AffiliationID = affiliation.AffiliationID - self.Name = person.PersonFirstName + \ - " " + \ - person.PersonLastName - self.Organization = "(" + org.OrganizationCode + ") " + \ - org.OrganizationName - - # def __repr__(self): - # return str(self.name) + " " + str(self.organization) + self.Name = person.PersonFirstName + ' ' + person.PersonLastName + self.Organization = '(' + org.OrganizationCode + ') ' + org.OrganizationName class ReadODM2(serviceBase): - - - # ################################################################################ # Exists functions - # ################################################################################ - def resultExists(self, result): """ - resultExists(self, result): Check to see if a Result Object exists * Pass Result Object - return a boolean value of wether the given object exists - """ - # unique Result - # FeatureActionID, ResultTypeCV, VariableID, UnitsID, ProcessingLevelID, SampledMediumCV + """ try: ret = self._session.query(exists().where(Results.ResultTypeCV == result.ResultTypeCV) @@ -68,59 +88,53 @@ def resultExists(self, result): .where(Results.ProcessingLevelID == 
result.ProcessingLevelID) .where(Results.SampledMediumCV == result.SampledMediumCV) ) - # where(Results.FeatureActionID == result.FeatureActionID). return ret.scalar() except: return None - # ################################################################################ # Annotations - # ################################################################################ - def getAnnotations(self, type=None, codes=None, ids=None): """ - def getAnnotations(self, type=None, codes = None, ids = None): - * Pass Nothing - return a list of all objects * Pass AnnotationTypeCV - return a list of all objects of the fiven type * Pass a list of codes - return a list of objects, one for each of the given codes * Pass a list of ids -return a list of objects, one for each of the given ids """ - # TODO What keywords do I use for type + # TODO What keywords do I use for type. a = Annotations if type: - if type == "action": + if type == 'action': a = ActionAnnotations - elif type == "categoricalresultvalue": + elif type == 'categoricalresultvalue': a = CategoricalResultValueAnnotations - elif type == "equipmentannotation": + elif type == 'equipmentannotation': a = EquipmentAnnotations - elif type == "measurementresultvalue": + elif type == 'measurementresultvalue': a = MeasurementResultValueAnnotations - elif type == "method": + elif type == 'method': a = MethodAnnotations - elif type == "pointcoverageresultvalue": + elif type == 'pointcoverageresultvalue': a = PointCoverageResultValueAnnotations - elif type == "profileresultvalue": + elif type == 'profileresultvalue': a = ProfileResultValueAnnotations - elif type == "result": + elif type == 'result': a = ResultAnnotations - elif type == "samplingfeature": + elif type == 'samplingfeature': a = SamplingFeatureAnnotations - elif type == "sectionresultvalue": + elif type == 'sectionresultvalue': a = SectionResultValueAnnotations - elif type == "spectraresultvalue": + elif type == 'spectraresultvalue': a = SpectraResultValueAnnotations - 
elif type == "timeseriesresultvalue": + elif type == 'timeseriesresultvalue': a = TimeSeriesResultValueAnnotations - elif type == "trajectoryresultvalue": + elif type == 'trajectoryresultvalue': a = TrajectoryResultValueAnnotations - elif type == "transectresultvalue": + elif type == 'transectresultvalue': a = TransectResultValueAnnotations try: - query=self._session.query(a) + query = self._session.query(a) if codes: query = query.filter(Annotations.AnnotationCode.in_(codes)) if ids: @@ -130,85 +144,81 @@ def getAnnotations(self, type=None, codes = None, ids = None): except: return None - # ################################################################################ # CV - # ############################################################################## - def getCVs(self, type): """ getCVs(self, type): * Pass CVType - return a list of all objects of the given type + """ CV = CVActionType - if type == "actiontype": + if type == 'actiontype': CV = CVActionType - elif type == "aggregationstatistic": + elif type == 'aggregationstatistic': CV = CVAggregationStatistic - elif type == "annotationtype": + elif type == 'annotationtype': CV = CVAnnotationType - elif type == "censorcode": + elif type == 'censorcode': CV = CVCensorCode - elif type == "dataqualitytype": + elif type == 'dataqualitytype': CV = CVDataQualityType - elif type == "dataset type": + elif type == 'dataset type': CV = CVDataSetType - elif type == "Directive Type": + elif type == 'Directive Type': CV = CVDirectiveType - elif type == "Elevation Datum": + elif type == 'Elevation Datum': CV = CVElevationDatum - elif type == "Equipment Type": + elif type == 'Equipment Type': CV = CVEquipmentType - elif type == "Medium": + elif type == 'Medium': CV = CVMediumType - elif type == "Method Type": + elif type == 'Method Type': CV = CVMethodType - elif type == "Organization Type": + elif type == 'Organization Type': CV = CVOrganizationType - elif type == "Property Data Type": + elif type == 'Property Data Type': 
CV = CVPropertyDataType - elif type == "Quality Code": + elif type == 'Quality Code': CV = CVQualityCode - elif type == "Relationship Type": + elif type == 'Relationship Type': CV = CVRelationshipType - elif type == "Result Type": + elif type == 'Result Type': CV = CVResultType - elif type == "Sampling Feature Geo-type": + elif type == 'Sampling Feature Geo-type': CV = CVSamplingFeatureGeoType - elif type == "Sampling Feature Type": + elif type == 'Sampling Feature Type': CV = CVSamplingFeatureType - elif type == "Site Type": + elif type == 'Site Type': CV = CVSiteType - elif type == "Spatial Offset Type": + elif type == 'Spatial Offset Type': CV = CVSpatialOffsetType - elif type == "Speciation": + elif type == 'Speciation': CV = CVSpeciation - elif type == "Specimen Type": + elif type == 'Specimen Type': CV = CVSpecimenType - elif type == "Status": + elif type == 'Status': CV = CVStatus - elif type == "Taxonomic Classifier Type": + elif type == 'Taxonomic Classifier Type': CV = CVTaxonomicClassifierType - elif type == "Units Type": + elif type == 'Units Type': CV = CVUnitsType - elif type == "Variable Name": + elif type == 'Variable Name': CV = CVVariableName - elif type == "Variable Type": + elif type == 'Variable Type': CV = CVVariableType else: return None try: return self._session.query(CV).all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) - # ################################################################################ # Core - # ################################################################################ - def getDetailedAffiliationInfo(self): """ - getDetailedAffiliationInfo(self) - * Pass Nothing - Return a list of all Affiliations with detailed information, including Affiliation, People and Organization + * Pass Nothing - Return a list of all Affiliations with detailed information, + including Affiliation, People and Organization + """ q = self._session.query(Affiliations, People, 
Organizations) \ .filter(Affiliations.PersonID == People.PersonID) \ @@ -220,18 +230,26 @@ def getDetailedAffiliationInfo(self): return affiliationList def getDetailedResultInfo(self, resultTypeCV=None, resultID=None, sfID=None): - #TODO can this be done by just getting the result object and drilling down? what is the performance comparison + # TODO can this be done by just getting the result object and drilling down? + # What is the performance comparison. """ - getDetailedResultInfo(self, resultTypeCV=None, resultID=None, sfID=None) Get detailed information for all selected Results including , unit info, site info, method info , ProcessingLevel info. * Pass nothing - return a list of all objects * Pass resultTypeCV - All objects of given type * Pass a result ID - single object with the given result ID * Pass a SamplingFeatureID - All objects associated with the given sampling feature. + """ - q = self._session.query(Actions, Results, SamplingFeatures.SamplingFeatureCode, SamplingFeatures.SamplingFeatureName, Methods, Variables, - ProcessingLevels, Units).filter(Results.VariableID == Variables.VariableID) \ + q = self._session.query( + Actions, + Results, + SamplingFeatures.SamplingFeatureCode, + SamplingFeatures.SamplingFeatureName, + Methods, + Variables, + ProcessingLevels, + Units).filter(Results.VariableID == Variables.VariableID) \ .filter(Results.UnitsID == Units.UnitsID) \ .filter(Results.FeatureActionID == FeatureActions.FeatureActionID) \ .filter(FeatureActions.SamplingFeatureID == SamplingFeatures.SamplingFeatureID) \ @@ -244,124 +262,116 @@ def getDetailedResultInfo(self, resultTypeCV=None, resultID=None, sfID=None): if sfID: q = q.filter(SamplingFeatures.SamplingFeatureID == sfID) if resultID: - q = q.filter(Results.ResultID==resultID) + q = q.filter(Results.ResultID == resultID) for a, r, sc, sn, m, v, p, u in q.all(): - detailedResult = DetailedResult( \ - a, r, sc, sn, m, v, p, u) + detailedResult = DetailedResult( + a, r, sc, sn, m, v, p, u + ) 
resultList.append(detailedResult) return resultList - """ - Taxonomic Classifiers - """ - + # Taxonomic Classifiers def getTaxonomicClassifiers(self): """ getTaxonomicClassifiers(self): * Pass nothing - return a list of all objects + """ return self._session.query(TaxonomicClassifiers).all() - """ - Variable - """ - - def getVariables(self, ids=None, codes=None, sitecode=None, results= False): + # Variable + def getVariables(self, ids=None, codes=None, sitecode=None, results=False): """ - getVariables(self, ids=None, codes=None, sitecode=None, results= False): * Pass nothing - returns full list of variable objects * Pass a list of VariableID - returns a single variable object * Pass a list of VariableCode - returns a single variable object * Pass a SiteCode - returns a list of Variable objects that are collected at the given site. * Pass whether or not you want to return the sampling features that have results associated with them - """ + """ if sitecode: try: - vars = [x[0] for x in - self._session.query(distinct(Results.VariableID)) - .filter(Results.FeatureActionID == FeatureActions.FeatureActionID) - .filter(FeatureActions.SamplingFeatureID == SamplingFeatures.SamplingFeatureID) - .filter(SamplingFeatures.SamplingFeatureCode == sitecode).all() - ] - + variables = [ + x[0] for x in + self._session.query(distinct(Results.VariableID)) + .filter(Results.FeatureActionID == FeatureActions.FeatureActionID) + .filter(FeatureActions.SamplingFeatureID == SamplingFeatures.SamplingFeatureID) + .filter(SamplingFeatures.SamplingFeatureCode == sitecode).all() + ] if ids: - ids = list(set(ids).intersection(vars)) + ids = list(set(ids).intersection(variables)) else: - ids = vars + ids = variables except: pass - if results: try: - vars = [x[0] for x in self._session.query(distinct(Results.VariableID)).all()] + variables = [x[0] for x in self._session.query(distinct(Results.VariableID)).all()] if ids: - ids = list(set(ids).intersection(vars)) + ids = 
list(set(ids).intersection(variables)) else: - ids = vars + ids = variables except: pass query = self._session.query(Variables) - if ids: query = query.filter(Variables.VariableID.in_(ids)) - if codes: query = query.filter(Variables.VariableCode.in_(codes)) + if ids: + query = query.filter(Variables.VariableID.in_(ids)) + if codes: + query = query.filter(Variables.VariableCode.in_(codes)) try: return query.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - Method - """ - + # Method def getMethods(self, ids=None, codes=None, type=None): """ - getMethods(self, ids=None, codes=None, type=None): * Pass nothing - returns full list of method objects * Pass a list of MethodIDs - returns a single method object for each given id * Pass a list of MethodCode - returns a single method object for each given code * Pass a MethodType - returns a list of method objects of the given MethodType + """ q = self._session.query(Methods) - if ids: q = q.filter(Methods.MethodID.in_(ids)) - if codes: q = q.filter(Methods.MethodCode.in_(codes)) - if type: q = q.filter_by(MethodTypeCV=type) + if ids: + q = q.filter(Methods.MethodID.in_(ids)) + if codes: + q = q.filter(Methods.MethodCode.in_(codes)) + if type: + q = q.filter_by(MethodTypeCV=type) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - ProcessingLevel - """ - + # ProcessingLevel def getProcessingLevels(self, ids=None, codes=None): """ getProcessingLevels(self, ids=None, codes=None) * Pass nothing - returns full list of ProcessingLevel objects * Pass a list of ProcessingLevelID - returns a single processingLevel object for each given id * Pass a list of ProcessingLevelCode - returns a single processingLevel object for each given code - """ + """ q = self._session.query(ProcessingLevels) - if ids: q = q.filter(ProcessingLevels.ProcessingLevelsID.in_(ids)) - 
if codes: q = q.filter(ProcessingLevels.ProcessingLevelCode.in_(codes)) + if ids: + q = q.filter(ProcessingLevels.ProcessingLevelsID.in_(ids)) + if codes: + q = q.filter(ProcessingLevels.ProcessingLevelCode.in_(codes)) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - Sampling Feature - """ - + # Sampling Feature def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=None, results=False): """Retrieve a list of Sampling Feature objects. @@ -407,39 +417,41 @@ def getSamplingFeatures(self, ids=None, codes=None, uuids=None, type=None, wkt=N q = self._session.query(SamplingFeatures) - if type: q = q.filter_by(SamplingFeatureTypeCV=type) - if ids: q = q.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) - if codes: q = q.filter(SamplingFeatures.SamplingFeatureCode.in_(codes)) - if uuids: q = q.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) - if wkt: q = q.filter_by(FeatureGeometryWKT=wkt) + if type: + q = q.filter_by(SamplingFeatureTypeCV=type) + if ids: + q = q.filter(SamplingFeatures.SamplingFeatureID.in_(ids)) + if codes: + q = q.filter(SamplingFeatures.SamplingFeatureCode.in_(codes)) + if uuids: + q = q.filter(SamplingFeatures.SamplingFeatureUUID.in_(uuids)) + if wkt: + q = q.filter_by(FeatureGeometryWKT=wkt) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - def getRelatedSamplingFeatures(self, sfid=None, rfid = None, relationshiptype=None): - #TODO: add functionality to filter by code + def getRelatedSamplingFeatures(self, sfid=None, rfid=None, relationshiptype=None): + # TODO: add functionality to filter by code """ - getRelatedSamplingFeatures(self, sfid=None, rfid = None, relationshiptype=None): - * Pass a SamplingFeatureID - get a list of sampling feature objects related to the input sampling feature + * Pass a SamplingFeatureID - get a list of 
sampling feature objects + related to the input sampling feature * Pass a RelatedFeatureID - get a list of Sampling features objects through the related feature * Pass a RelationshipTypeCV - get a list of sampling feature objects with the given type """ - # q = session.query(Address).select_from(User). \ - # join(User.addresses). \ - # filter(User.name == 'ed') - #throws an error when joining entire samplingfeature, works fine when just getting an element. this is being - # caused by the sampling feature inheritance - sf = self._session.query(distinct(SamplingFeatures.SamplingFeatureID))\ .select_from(RelatedFeatures) - if sfid: sf = sf.join(RelatedFeatures.RelatedFeatureObj).filter(RelatedFeatures.SamplingFeatureID == sfid) - if rfid: sf = sf.join(RelatedFeatures.SamplingFeatureObj).filter(RelatedFeatures.RelatedFeatureID == rfid) - if relationshiptype: sf = sf.filter(RelatedFeatures.RelationshipTypeCV == relationshiptype) + if sfid: + sf = sf.join(RelatedFeatures.RelatedFeatureObj).filter(RelatedFeatures.SamplingFeatureID == sfid) + if rfid: + sf = sf.join(RelatedFeatures.SamplingFeatureObj).filter(RelatedFeatures.RelatedFeatureID == rfid) + if relationshiptype: + sf = sf.filter(RelatedFeatures.RelationshipTypeCV == relationshiptype) try: sfids = [x[0] for x in sf.all()] if len(sfids) > 0: @@ -447,119 +459,116 @@ def getRelatedSamplingFeatures(self, sfid=None, rfid = None, relationshiptype=No return sflist except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - Action - """ - + # Action def getActions(self, ids=None, type=None, sfid=None): """ - getActions(self, ids=None, type=None, sfid=None) * Pass nothing - returns a list of all Actions * Pass a list of Action ids - returns a list of Action objects * Pass a ActionTypeCV - returns a list of Action objects of that type - * Pass a SamplingFeature ID - returns a list of Action objects associated with that Sampling feature ID, Found through 
featureAction table - """ + * Pass a SamplingFeature ID - returns a list of Action objects + associated with that Sampling feature ID, Found through featureAction table + """ a = Actions - if type == "equipment": + if type == 'equipment': a = EquipmentActions - elif type == "calibration": + elif type == 'calibration': a = CalibrationActions - elif type == "maintenance": + elif type == 'maintenance': a = MaintenanceActions q = self._session.query(a) - if ids: q = q.filter(a.ActionID.in_(ids)) + if ids: + q = q.filter(a.ActionID.in_(ids)) if sfid: q = q.join(FeatureActions).filter(FeatureActions.SamplingFeatureID == sfid) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None def getRelatedActions(self, actionid=None): """ - getRelatedActions(self, actionid=None) - * Pass an ActionID - get a list of Action objects related to the input action along with the relatinship type + * Pass an ActionID - get a list of Action objects related to the input + action along with the relationship type """ q = self._session.query(Actions).select_from(RelatedActions).join(RelatedActions.RelatedActionObj) - if actionid: q = q.filter(RelatedActions.ActionID == actionid) + if actionid: + q = q.filter(RelatedActions.ActionID == actionid) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - Unit - """ - + # Unit def getUnits(self, ids=None, name=None, type=None): """ - getUnits(self, ids=None, name=None, type=None) * Pass nothing - returns a list of all units objects * Pass a list of UnitsID - returns a single units object for the given id * Pass UnitsName - returns a single units object * Pass a type- returns a list of all objects of the given type - """ + """ q = self._session.query(Units) - if ids: q = q.filter(Units.UnitsID.in_(ids)) - if name: q = q.filter(Units.UnitsName.ilike(name)) - if type: q = 
q.filter(Units.UnitsTypeCV.ilike(type)) + if ids: + q = q.filter(Units.UnitsID.in_(ids)) + if name: + q = q.filter(Units.UnitsName.ilike(name)) + if type: + q = q.filter(Units.UnitsTypeCV.ilike(type)) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - Organization - """ - + # Organization def getOrganizations(self, ids=None, codes=None): """ - getOrganizations(self, ids=None, codes=None) * Pass nothing - returns a list of all organization objects * Pass a list of OrganizationID - returns a single organization object * Pass a list of OrganizationCode - returns a single organization object + """ q = self._session.query(Organizations) - if ids: q = q.filter(Organizations.OrganizationID.in_(ids)) - if codes: q = q.filter(Organizations.OrganizationCode.in_(codes)) + if ids: + q = q.filter(Organizations.OrganizationID.in_(ids)) + if codes: + q = q.filter(Organizations.OrganizationCode.in_(codes)) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - """ - Person - """ - + # Person def getPeople(self, ids=None, firstname=None, lastname=None): """ - getPeople(self, ids=None, firstname=None, lastname=None) * Pass nothing - returns a list of all People objects * Pass a list of PeopleID - returns a single People object * Pass a First Name - returns a single People object * Pass a Last Name - returns a single People object + """ q = self._session.query(People) - if ids: q = q.filter(People.PersonID.in_(ids)) - if firstname: q = q.filter(People.PersonFirstName.ilike(firstname)) - if lastname: q = q.filter(People.PersonLastName.ilike(lastname)) + if ids: + q = q.filter(People.PersonID.in_(ids)) + if firstname: + q = q.filter(People.PersonFirstName.ilike(firstname)) + if lastname: + q = q.filter(People.PersonLastName.ilike(lastname)) try: return q.all() except Exception as e: - print("Error running 
Query: %s" % e) + print('Error running Query: {}'.format(e)) return None def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=None): @@ -586,24 +595,22 @@ def getAffiliations(self, ids=None, personfirst=None, personlast=None, orgcode=N """ q = self._session.query(Affiliations) - if ids: q = q.filter(Affiliations.AffiliationID.in_(ids)) - if orgcode: q = q.join(Affiliations.OrganizationObj).filter( - Organizations.OrganizationCode.ilike(orgcode)) - if personfirst: q = q.join(Affiliations.PersonObj).filter( - People.PersonFirstName.ilike(personfirst)) - if personlast: q = q.join(Affiliations.PersonObj).filter( - People.PersonLastName.ilike(personlast)) + if ids: + q = q.filter(Affiliations.AffiliationID.in_(ids)) + if orgcode: + q = q.join(Affiliations.OrganizationObj).filter(Organizations.OrganizationCode.ilike(orgcode)) + if personfirst: + q = q.join(Affiliations.PersonObj).filter(People.PersonFirstName.ilike(personfirst)) + if personlast: + q = q.join(Affiliations.PersonObj).filter(People.PersonLastName.ilike(personlast)) try: return q.all() except Exception as e: - print("Error running Query: %s"%e) + print('Error running Query: {}'.format(e)) return None - """ - Results - """ - + # Results def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationid=None, sfid=None, variableid=None, siteid=None): @@ -639,43 +646,45 @@ def getResults(self, ids=None, type=None, uuids=None, actionid=None, simulationi >>> ReadODM2.getResults(actionid=20) """ - query = self._session.query(Results) - if type: query = query.filter_by(ResultTypeCV=type) - if variableid: query = query.filter_by(VariableID=variableid) - if ids: query = query.filter(Results.ResultID.in_(ids)) - if uuids: query = query.filter(Results.ResultUUID.in_(uuids)) - if simulationid: query = query.join(FeatureActions)\ - .join(Actions)\ - .join(Simulations)\ - .filter_by(SimulationID=simulationid) - if actionid: query = 
query.join(FeatureActions).filter_by(ActionID=actionid) - if sfid: query = query.join(FeatureActions).filter_by(SamplingFeatureID=sfid) + if type: + query = query.filter_by(ResultTypeCV=type) + if variableid: + query = query.filter_by(VariableID=variableid) + if ids: + query = query.filter(Results.ResultID.in_(ids)) + if uuids: + query = query.filter(Results.ResultUUID.in_(uuids)) + if simulationid: + query = query.join(FeatureActions)\ + .join(Actions)\ + .join(Simulations)\ + .filter_by(SimulationID=simulationid) + if actionid: + query = query.join(FeatureActions).filter_by(ActionID=actionid) + if sfid: + query = query.join(FeatureActions).filter_by(SamplingFeatureID=sfid) if siteid: - sfids = [x[0] for x in self._session.query(distinct(SamplingFeatures.SamplingFeatureID)) - .select_from(RelatedFeatures) - .join(RelatedFeatures.SamplingFeatureObj) - .filter(RelatedFeatures.RelatedFeatureID == siteid) - #.filter(RelatedFeatures.RelationshipTypeCV == "Was Collected at") - .all()] + sfids = [x[0] for x in self._session.query( + distinct(SamplingFeatures.SamplingFeatureID)) + .select_from(RelatedFeatures) + .join(RelatedFeatures.SamplingFeatureObj) + .filter(RelatedFeatures.RelatedFeatureID == siteid) + .all() + ] query = query.join(FeatureActions).filter(FeatureActions.SamplingFeatureID.in_(sfids)) try: return query.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - - """ - Datasets - """ - + # Datasets def getDataSets(self, codes=None, uuids=None): """ - getDataSets(self, codes=None, uuids=None) * Pass nothing - returns a list of all DataSet objects * Pass a list of DataSetCode - returns a single DataSet object for each code * Pass a list of UUIDS - returns a single DataSet object for each UUID @@ -688,16 +697,12 @@ def getDataSets(self, codes=None, uuids=None): try: return q.all() except Exception as e: - print("Error running Query %s" % e) + print('Error running Query {}'.format(e)) 
return None - # ################################################################################ # Data Quality - # ################################################################################ - def getDataQuality(self): """ - getDataQuality(self) * Pass nothing - return a list of all objects """ return self._session.query(DataQuality).all() @@ -705,197 +710,190 @@ def getDataQuality(self): # TODO DataQuality Schema Queries def getReferenceMaterials(self): """ - getReferenceMaterials(self) * Pass nothing - return a list of all objects """ return self._session.query(ReferenceMaterials).all() def getReferenceMaterialValues(self): """ - getReferenceMaterialValues(self) * Pass nothing - return a list of all objects """ return self._session.query(ReferenceMaterialValues).all() def getResultNormalizationValues(self): """ - getResultNormalizationValues(self) * Pass nothing - return a list of all objects """ return self._session.query(ResultNormalizationValues).all() def getResultsDataQuality(self): """ - getResultsDataQuality(self) * Pass nothing - return a list of all objects """ return self._session.query(ResultsDataQuality).all() - # ################################################################################ - # Equipment - # ################################################################################ - # TODO Equipment Schema Queries + # Equipment def getEquipment(self, codes=None, type=None, sfid=None, actionid=None): """ - getEquipment(self, codes=None, type=None, sfid=None, actionid=None) * Pass nothing - returns a list of all Equipment objects * Pass a list of EquipmentCodes- return a list of all Equipment objects that match each of the codes * Pass a EquipmentType - returns a single Equipment object * Pass a SamplingFeatureID - returns a single Equipment object * Pass an ActionID - returns a single Equipment object + """ e = self._session.query(Equipment) - if sfid: e = e.join(EquipmentUsed) \ - .join(Actions) \ - .join(FeatureActions) \ - 
.filter(FeatureActions.SamplingFeatureID == sfid) - if codes: e = e.filter(Equipment.EquipmentCode.in_(codes)) - if actionid: e = e.join(EquipmentUsed).join(Actions) \ - .filter(Actions.ActionID == actionid) + if sfid: + e = e.join(EquipmentUsed) \ + .join(Actions) \ + .join(FeatureActions) \ + .filter(FeatureActions.SamplingFeatureID == sfid) + if codes: + e = e.filter(Equipment.EquipmentCode.in_(codes)) + if actionid: + e = e.join(EquipmentUsed).join(Actions) \ + .filter(Actions.ActionID == actionid) return e.all() def CalibrationReferenceEquipment(self): """ - CalibrationReferenceEquipment(self) * Pass nothing - return a list of all objects + """ return self._session.query(CalibrationReferenceEquipment).all() def CalibrationStandards(self): """ - CalibrationStandards(self) * Pass nothing - return a list of all objects + """ return self._session.query(CalibrationStandards).all() def DataloggerFileColumns(self): """ - DataloggerFileColumns(self) * Pass nothing - return a list of all objects + """ return self._session.query(DataLoggerFileColumns).all() def DataLoggerFiles(self): """ - DataLoggerFiles(self) * Pass nothing - return a list of all objects + """ return self._session.query(DataLoggerFiles).all() def DataloggerProgramFiles(self): """ - DataloggerProgramFiles(self) * Pass Nothing - return a list of all objects + """ return self._session.query(DataLoggerProgramFiles).all() def EquipmentModels(self): """ - EquipmentModels(self) * Pass Nothing - return a list of all objects + """ return self._session.query(EquipmentModels).all() def EquipmentUsed(self): """ - EquipmentUsed(self) * Pass Nothing - return a list of all objects + """ return self._session.query(EquipmentUsed).all() def InstrumentOutputVariables(self, modelid=None, variableid=None): """ - InstrumentOutputVariables(self, modelid=None, variableid=None) * Pass Nothing - return a list of all objects * Pass ModelID * Pass VariableID + """ i = self._session.query(InstrumentOutputVariables) - if modelid: 
i = i.filter_by(ModelID=modelid) - if variableid: i = i.filter_by(VariableID=variableid) + if modelid: + i = i.filter_by(ModelID=modelid) + if variableid: + i = i.filter_by(VariableID=variableid) return i.all() def RelatedEquipment(self, code=None): """ - RelatedEquipment(self, code=None) * Pass nothing - return a list of all objects * Pass code- return a single object with the given code + """ r = self._session.query(RelatedEquipment) - if code: r = r.filter_by(EquipmentCode=code) + if code: + r = r.filter_by(EquipmentCode=code) return r.all() - # ################################################################################ # Extension Properties - # ################################################################################ - def getExtensionProperties(self, type=None): """ - getExtensionProperties(self, type=None) * Pass nothing - return a list of all objects * Pass type- return a list of all objects of the given type + """ # Todo what values to use for extensionproperties type e = ExtensionProperties - if type == "action": + if type == 'action': e = ActionExtensionPropertyValues - elif type == "citation": + elif type == 'citation': e = CitationExtensionPropertyValues - elif type == "method": + elif type == 'method': e = MethodExtensionPropertyValues - elif type == "result": + elif type == 'result': e = ResultExtensionPropertyValues - elif type == "samplingfeature": + elif type == 'samplingfeature': e = SamplingFeatureExtensionPropertyValues - elif type == "variable": + elif type == 'variable': e = VariableExtensionPropertyValues try: return self._session.query(e).all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - # ################################################################################ # External Identifiers - # ################################################################################ def getExternalIdentifiers(self, type=None): """ - getExternalIdentifiers(self, 
type=None) * Pass nothing - return a list of all objects * Pass type- return a list of all objects of the given type + """ e = ExternalIdentifierSystems - if type.lowercase == "citation": + if type.lowercase == 'citation': e = CitationExternalIdentifiers - elif type == "method": + elif type == 'method': e = MethodExternalIdentifiers - elif type == "person": + elif type == 'person': e = PersonExternalIdentifiers - elif type == "referencematerial": + elif type == 'referencematerial': e = ReferenceMaterialExternalIdentifiers - elif type == "samplingfeature": + elif type == 'samplingfeature': e = SamplingFeatureExternalIdentifiers - elif type == "spatialreference": + elif type == 'spatialreference': e = SpatialReferenceExternalIdentifiers - elif type == "taxonomicclassifier": + elif type == 'taxonomicclassifier': e = TaxonomicClassifierExternalIdentifiers - elif type == "variable": + elif type == 'variable': e = VariableExternalIdentifiers try: return self._session.query(e).all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - # ################################################################################ - # Lab Analyses - # ################################################################################ # TODO functions for Lab Analyses + # Lab Analyses def getDirectives(self): """ getDirectives(self) * Pass nothing - return a list of all objects + """ return self._session.query(Directives).all() @@ -903,6 +901,7 @@ def getActionDirectives(self): """ getActionDirectives(self) * Pass nothing - return a list of all objects + """ return self._session.query(ActionDirectives).all() @@ -910,18 +909,17 @@ def getSpecimenBatchPositions(self): """ getSpecimenBatchPositions(self) * Pass nothing - return a list of all objects + """ return self._session.query(SpecimenBatchPositions).all() - # ################################################################################ - # Provenance - # 
################################################################################ - # TODO functions for Provenance + # Provenance def getAuthorLists(self): """ getAuthorLists(self) * Pass nothing - return a list of all objects + """ return self._session.query(AuthorLists).all() @@ -929,6 +927,7 @@ def getDatasetCitations(self): """ getDatasetCitations(self) * Pass nothing - return a list of all objects + """ return self._session.query(DataSetCitations).all() @@ -936,6 +935,7 @@ def getDerivationEquations(self): """ getDerivationEquations(self) * Pass nothing - return a list of all objects + """ return self._session.query(DerivationEquations).all() @@ -943,6 +943,7 @@ def getMethodCitations(self): """ getMethodCitations(self) * Pass nothing - return a list of all objects + """ return self._session.query(MethodCitations).all() @@ -950,14 +951,15 @@ def getRelatedAnnotations(self): """ getRelatedAnnotations(self) * Pass nothing - return a list of all objects + """ - # q= read._session.query(Actions).select_from(RelatedActions).join(RelatedActions.RelatedActionObj) return self._session.query(RelatedAnnotations).all() def getRelatedCitations(self): """ getRelatedCitations(self) * Pass nothing - return a list of all objects + """ return self._session.query(RelatedCitations).all() @@ -965,6 +967,7 @@ def getRelatedDatasets(self): """ getRelatedDatasets(self) * Pass nothing - return a list of all objects + """ return self._session.query(RelatedDataSets).all() @@ -972,6 +975,7 @@ def getRelatedResults(self): """ getRelatedResults(self) * Pass nothing - return a list of all objects + """ return self._session.query(RelatedResults).all() @@ -979,114 +983,116 @@ def getResultDerivationEquations(self): """ getResultDerivationEquations(self) * Pass nothing - return a list of all objects + """ return self._session.query(ResultDerivationEquations).all() - # ################################################################################ # Results - # 
################################################################################ - - """ - ResultValues - """ - + # ResultValues def getResultValues(self, resultids, starttime=None, endtime=None): """ getResultValues(self, resultids, starttime=None, endtime=None) - * Pass in a list of ResultID - Returns a pandas dataframe object of type that is specific to the result type - - The resultids must be associated with the same value type - * Pass a ResultID and a date range - returns a pandas dataframe object of type that is specific to the result type with values between the input date range + * Pass in a list of ResultID - Returns a pandas dataframe object of type + that is specific to the result type - The resultids must be associated + with the same value type + * Pass a ResultID and a date range - returns a pandas dataframe object + of type that is specific to the result type with values between the input date range * Pass a starttime - Returns a dataframe with the values after the given start time * Pass an endtime - Returns a dataframe with the values before the given end time + """ - type= self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV + type = self._session.query(Results).filter_by(ResultID=resultids[0]).first().ResultTypeCV ResultType = TimeSeriesResults - if "categorical" in type.lower():ResultType = CategoricalResultValues - elif "measurement" in type.lower():ResultType = MeasurementResultValues - elif "point" in type.lower():ResultType = PointCoverageResultValues - elif "profile" in type.lower():ResultType = ProfileResultValues - elif "section" in type.lower():ResultType = SectionResults - elif "spectra" in type.lower():ResultType = SpectraResultValues - elif "time" in type.lower():ResultType = TimeSeriesResultValues - elif "trajectory" in type.lower():ResultType = TrajectoryResultValues - elif "transect" in type.lower():ResultType = TransectResultValues - - # q.filter(Affiliations.AffiliationID.in_(ids)) + if 
'categorical' in type.lower(): + ResultType = CategoricalResultValues + elif 'measurement' in type.lower(): + ResultType = MeasurementResultValues + elif 'point' in type.lower(): + ResultType = PointCoverageResultValues + elif 'profile' in type.lower(): + ResultType = ProfileResultValues + elif 'section' in type.lower(): + ResultType = SectionResults + elif 'spectra' in type.lower(): + ResultType = SpectraResultValues + elif 'time' in type.lower(): + ResultType = TimeSeriesResultValues + elif 'trajectory' in type.lower(): + ResultType = TrajectoryResultValues + elif 'transect' in type.lower(): + ResultType = TransectResultValues q = self._session.query(ResultType).filter(ResultType.ResultID.in_(resultids)) - if starttime: q = q.filter(ResultType.ValueDateTime >= starttime) - if endtime: q = q.filter(ResultType.ValueDateTime <= endtime) + if starttime: + q = q.filter(ResultType.ValueDateTime >= starttime) + if endtime: + q = q.filter(ResultType.ValueDateTime <= endtime) try: - vals = q.order_by(ResultType.ValueDateTime) - # df = pd.DataFrame([dv.list_repr() for dv in vals.all()]) - # df.columns = vals[0].get_columns() - + # F841 local variable 'vals' is assigned to but never used + # vals = q.order_by(ResultType.ValueDateTime) query = q.statement.compile(dialect=self._session_factory.engine.dialect) - df = pd.read_sql_query(sql=query, - con=self._session_factory.engine, - params=query.params) + df = pd.read_sql_query( + sql=query, + con=self._session_factory.engine, + params=query.params + ) return df except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - # ################################################################################ # SamplingFeatures - # ################################################################################ - - """ - Site - """ - + # Site def getSpatialReferences(self, srsCodes=None): """ getSpatialReferences(self, srsCodes=None) * Pass nothing - return a list of all 
Spatial References * Pass in a list of SRS Codes- + """ q = self._session.query(SpatialReferences) - if srsCodes: q.filter(SpatialReferences.SRSCode.in_(srsCodes)) + if srsCodes: + q.filter(SpatialReferences.SRSCode.in_(srsCodes)) try: return q.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - - # ################################################################################ # Simulation - # ################################################################################ - def getSimulations(self, name=None, actionid=None): """ getSimulations(self, name=None, actionid=None) * Pass nothing - get a list of all converter simuation objects * Pass a SimulationName - get a single simulation object * Pass an ActionID - get a single simulation object + """ s = self._session.query(Simulations) - if name: s = s.filter(Simulations.SimulationName.ilike(name)) - if actionid: s = s.filter_by(ActionID=actionid) + if name: + s = s.filter(Simulations.SimulationName.ilike(name)) + if actionid: + s = s.filter_by(ActionID=actionid) try: return s.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - - def getModels(self, codes=None): """ getModels(self, codes=None) * Pass nothing - return a list of all Model Objects * Pass a list of ModelCodes - get a list of converter objects related to the converter having ModeCode + """ m = self._session.query(Models) - if codes: m = m.filter(Models.ModelCode.in_(codes)) + if codes: + m = m.filter(Models.ModelCode.in_(codes)) try: return m.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None def getRelatedModels(self, id=None, code=None): @@ -1094,25 +1100,16 @@ def getRelatedModels(self, id=None, code=None): getRelatedModels(self, id=None, code=None) * Pass a ModelID - get a list of converter objects related to the converter 
having ModelID * Pass a ModelCode - get a list of converter objects related to the converter having ModeCode - """ -# cdoe from master -#+ # note this was RelatedModels.RelatedModelID == Models.ModelID which would return all Parent models of RelatedModelID -# + self._session.query(RelatedModels).filter_by(ModelID=modelid).all() -# + self._session.query(RelatedModels).join(Models, RelatedModels.ModelID == Models.ModelID).filter(Models.ModelCode == modelcode).all() + """ m = self._session.query(Models).select_from(RelatedModels).join(RelatedModels.ModelObj) - if id: m = m.filter(RelatedModels.ModelID == id) - if code: m = m.filter(Models.ModelCode == code) - -#previous version of code - # m = self._session.query(Models).select_from(RelatedModels).join(RelatedModels.RelatedModelObj) - # if id: m = m.filter(RelatedModels.ModelID == id) - # if code: m = m.filter(RelatedModels.ModelCode == code) + if id: + m = m.filter(RelatedModels.ModelID == id) + if code: + m = m.filter(Models.ModelCode == code) try: return m.all() except Exception as e: - print("Error running Query: %s" % e) + print('Error running Query: {}'.format(e)) return None - - diff --git a/odm2api/ODM2/services/updateService.py b/odm2api/ODM2/services/updateService.py index 9cee854..1bec3ce 100644 --- a/odm2api/ODM2/services/updateService.py +++ b/odm2api/ODM2/services/updateService.py @@ -1,3 +1,5 @@ +from __future__ import (absolute_import, division, print_function) + __author__ = 'jmeline' from datetime import datetime @@ -126,4 +128,3 @@ def updateAction(self, actionID=None, begin=None, end=None, action = None): # ################################################################################ # ODM2 # ################################################################################ - diff --git a/odm2api/ODMconnection.py b/odm2api/ODMconnection.py index e1baeb0..c3c1833 100644 --- a/odm2api/ODMconnection.py +++ b/odm2api/ODMconnection.py @@ -1,52 +1,52 @@ +from __future__ import (absolute_import, 
division, print_function) -from sqlalchemy.exc import SQLAlchemyError, DBAPIError -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, scoped_session - -from odm2api.ODM2.models import Variables as Variable2, setSchema - - -import urllib -import sys import os +import sys +try: + from urllib import quote_plus +except ImportError: + from urllib.parse import quote_plus +from odm2api.ODM2.models import setSchema + +from sqlalchemy import create_engine +from sqlalchemy.orm import scoped_session, sessionmaker -# LIBSPATIALITE_PATH = './libspatialite.so.5.1.0' class SessionFactory(): def __init__(self, connection_string, echo=True, version=2.0): if 'sqlite' in connection_string: - self.engine = create_engine(connection_string, encoding='utf-8', echo=echo, pool_recycle=100)#, pool_pre_ping=True) + self.engine = create_engine(connection_string, encoding='utf-8', echo=echo, pool_recycle=100) self.test_engine = self.engine elif 'mssql' in connection_string: - self.engine = create_engine(connection_string, encoding='utf-8', echo=echo, pool_recycle=100)#, pool_pre_ping =True) - self.test_engine = create_engine(connection_string, encoding='utf-8', echo=echo, connect_args={'timeout': 1}) + self.engine = create_engine(connection_string, encoding='utf-8', echo=echo, pool_recycle=100) + self.test_engine = create_engine(connection_string, encoding='utf-8', + echo=echo, connect_args={'timeout': 1}) elif 'postgresql' in connection_string or 'mysql' in connection_string: - self.engine = create_engine(connection_string, encoding='utf-8', echo=echo, pool_recycle=100)# , pool_pre_ping=True) + self.engine = create_engine(connection_string, encoding='utf-8', echo=echo, pool_recycle=100) self.test_engine = create_engine(connection_string, encoding='utf-8', echo=echo, - max_overflow=0, connect_args={'connect_timeout': 1}) - + max_overflow=0, connect_args={'connect_timeout': 1}) - # Create session maker + # Create session maker. 
self.Session = scoped_session(sessionmaker(bind=self.engine, autoflush=True)) self.test_Session = scoped_session(sessionmaker(bind=self.test_engine)) setSchema(self.engine) - self.version=version + self.version = version def getSession(self): return self.Session() def __repr__(self): - return "" % self.engine + return '' % self.engine class dbconnection(): def __init__(self, debug=False): self.debug = debug self.version = -1 - self._connection_format = "%s+%s://%s:%s@%s/%s" - self._connection_format_nopassword = "%s+%s://%s@%s/%s" + self._connection_format = '%s+%s://%s:%s@%s/%s' + self._connection_format_nopassword = '%s+%s://%s@%s/%s' @classmethod def createConnection(self, engine, address, db=None, user=None, password=None, dbtype=2.0, echo=False): @@ -54,9 +54,10 @@ def createConnection(self, engine, address, db=None, user=None, password=None, d if engine == 'sqlite': connection_string = engine + ':///' + address return self.createConnectionFromString(connection_string, dbtype, echo) - else: - connection_string = dbconnection.__buildConnectionString(dbconnection(), engine, address, db, user, password) + connection_string = dbconnection.__buildConnectionString( + dbconnection(), engine, address, db, user, password + ) if self.isValidConnection(connection_string, dbtype): return self.createConnectionFromString(connection_string, dbtype, echo) else: @@ -87,9 +88,9 @@ def testEngine(self, connection_string, echo=False): try: setSchema(s.test_engine) # s.test_Session().query(Variable2.VariableCode).limit(1).first() - s.test_Session().execute("Select 1") + s.test_Session().execute('Select 1') except Exception as e: - print("Connection was unsuccessful ", e.message) + print('Connection was unsuccessful {}'.format(e.message)) return False finally: dbconnection.closeConnection(s.test_Session) @@ -100,16 +101,15 @@ def testEngine1_1(self, connection_string, echo=False): s = SessionFactory(connection_string, echo=echo, version=1.1) try: # 
s.test_Session().query(ODM.Variable.code).limit(1).first() - s.test_Session().execute("Select 1") + s.test_Session().execute('Select 1') except Exception as e: - print("Connection was unsuccessful ", e.message) + print('Connection was unsuccessful {}'.format(e.message)) return False finally: dbconnection.closeConnection(s.test_Session) return True - @classmethod def buildConnectionString(self, engine, address, db, user, password): return dbconnection.__buildConnectionString(dbconnection(), engine, address, db, user, password) @@ -125,27 +125,27 @@ def closeConnection(self, session): def __buildConnectionString(self, engine=None, address=None, db=None, user=None, password=None): if engine == 'mssql' and sys.platform != 'win32': - driver = "pyodbc" - quoted = urllib.quote_plus('DRIVER={FreeTDS};DSN=%s;UID=%s;PWD=%s;' % (address, user, password)) + driver = 'pyodbc' + quoted = quote_plus('DRIVER={FreeTDS};DSN=%s;UID=%s;PWD=%s;' % (address, user, password)) conn_string = 'mssql+pyodbc:///?odbc_connect={}'.format(quoted) elif engine == 'sqlite': driver = 'sqlite' - conn_string = "%s:///%s" % (driver, address) + conn_string = '%s:///%s' % (driver, address) else: if engine == 'mssql': - driver = "pyodbc" - conn = "%s+%s://%s:%s@%s/%s?driver=SQL+Server" - if "sqlncli11.dll" in os.listdir("C:\\Windows\\System32"): - conn = "%s+%s://%s:%s@%s/%s?driver=SQL+Server+Native+Client+11.0" + driver = 'pyodbc' + conn = '%s+%s://%s:%s@%s/%s?driver=SQL+Server' + if 'sqlncli11.dll' in os.listdir('C:\\Windows\\System32'): + conn = '%s+%s://%s:%s@%s/%s?driver=SQL+Server+Native+Client+11.0' self._connection_format = conn conn_string = self._connection_format % (engine, driver, user, password, address, db) else: if engine == 'mysql': - driver = "pymysql" + driver = 'pymysql' elif engine == 'postgresql': - driver = "psycopg2" + driver = 'psycopg2' else: - driver = "None" + driver = 'None' conn_string = self.constringBuilder(engine, address, db, user, password, driver) return conn_string diff 
--git a/odm2api/__init__.py b/odm2api/__init__.py index bd256c4..830dc2e 100644 --- a/odm2api/__init__.py +++ b/odm2api/__init__.py @@ -1,10 +1,13 @@ +from __future__ import (absolute_import, division, print_function) + from odm2api.ODMconnection import SessionFactory, dbconnection from odm2api.base import serviceBase __all__ = [ 'SessionFactory', 'dbconnection', - 'serviceBase',] + 'serviceBase', +] from ._version import get_versions __version__ = get_versions()['version'] diff --git a/odm2api/base.py b/odm2api/base.py index b15321a..3184cf7 100644 --- a/odm2api/base.py +++ b/odm2api/base.py @@ -1,32 +1,15 @@ - - - +from __future__ import (absolute_import, division, print_function) class serviceBase(object): - - # __metaclass__ = SingletonByConn - - ''' - def __init__(self, session): - self._session = session - ''' def __init__(self, session_factory, debug=False): - ''' - must send in either a session_factory #TODO or a connection, exclusive or - ''' + """Must send in either a session_factory.""" - # if connection is None: self._session_factory = session_factory - # else: - # self._session_factory = SessionFactory(connection) self._session = self._session_factory.getSession() self._version = session_factory.version self._debug = debug - - - def getSession(self): if self._session is None: self._session = self._session_factory.getSession() @@ -34,10 +17,7 @@ def getSession(self): return self._session def reset_session(self): - self._session =self._session_factory.getSession() #reset the session in order to prevent memory leaks - - - + self._session = self._session_factory.getSession() class Base(object): @@ -50,7 +30,8 @@ def __tablename__(cls): __table_args__ = {u'schema': 'odm2'} def __init__(self, *args, **kwargs): - for name, value in kwargs.items(): setattr(self, name, value) + for name, value in kwargs.items(): + setattr(self, name, value) def __eq__(self, other): return self.__dict__ == other.__dict__ @@ -58,23 +39,12 @@ def __eq__(self, other): def 
__repr__(self): valuedict = self.__dict__.copy() for v in valuedict.keys(): - if "obj" in v.lower(): + if 'obj' in v.lower(): del valuedict[v] # del valuedict["_sa_instance_state"] - return "<%s(%s)>" % (self.__class__.__name__, str(valuedict)) + return '<%s(%s)>' % (self.__class__.__name__, str(valuedict)) class modelBase(): from sqlalchemy.ext.declarative import declarative_base Base = declarative_base(cls=Base) - - - - - - - - - - - diff --git a/setup.py b/setup.py index 6313168..309ef6c 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,5 @@ +from __future__ import (absolute_import, division, print_function) + """A setuptools based setup module. See: diff --git a/tests/__init__.py b/tests/__init__.py index 8b13789..78f3bf2 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1 +1 @@ - +from __future__ import (absolute_import, division, print_function) diff --git a/tests/schemas/__init__.py b/tests/schemas/__init__.py index e69de29..78f3bf2 100644 --- a/tests/schemas/__init__.py +++ b/tests/schemas/__init__.py @@ -0,0 +1 @@ +from __future__ import (absolute_import, division, print_function) diff --git a/tests/schemas/postgresql/__init__.py b/tests/schemas/postgresql/__init__.py index e69de29..78f3bf2 100644 --- a/tests/schemas/postgresql/__init__.py +++ b/tests/schemas/postgresql/__init__.py @@ -0,0 +1 @@ +from __future__ import (absolute_import, division, print_function) diff --git a/tests/schemas/postgresql/olderversions/DbWrench_DDL_postprocess.py b/tests/schemas/postgresql/olderversions/DbWrench_DDL_postprocess.py index 0c9de9b..6a964d7 100644 --- a/tests/schemas/postgresql/olderversions/DbWrench_DDL_postprocess.py +++ b/tests/schemas/postgresql/olderversions/DbWrench_DDL_postprocess.py @@ -1,3 +1,5 @@ +from __future__ import (absolute_import, division, print_function) + """ DbWrench_DDL_postprocess.py Emilio Mayorga (UW/APL) 8/15-18/2014 @@ -5,7 +7,7 @@ generate a new, blank ODM2 database following ODM2 conventions. Specifically: 1. 
All entity names will be lowercase 2. All entities will be under a single schema -3. The field samplingfeatures.featuregeometry will be PostGIS geometry field constrained +3. The field samplingfeatures.featuregeometry will be PostGIS geometry field constrained to be 2D, but otherwise free to store any project (eg, epsg:4326) and to accept any geometry type (point, line, polygon, and collections thereof [multi-polygon, etc]) @@ -26,20 +28,20 @@ # =============== USER (run-time) CHANGES ================= # DDL input file name -ddlfile = "ODM2_DDL_for_PostgreSQL9.3PostGIS2.1.sql" +ddlfile = 'ODM2_DDL_for_PostgreSQL9.3PostGIS2.1.sql' # DDL output file name -ddlppfile = "ODM2_DDL_for_PostgreSQL9.3PostGIS2.1_postprocessed.sql" +ddlppfile = 'ODM2_DDL_for_PostgreSQL9.3PostGIS2.1_postprocessed.sql' -newschemaname = "odm2" -odmversion = "2.0" -odm2infodct = {'schema':newschemaname, 'version':odmversion} +newschemaname = 'odm2' +odmversion = '2.0' +odm2infodct = {'schema': newschemaname, 'version': odmversion} # ========================================================= pre_block = """ /* Post-processed DDL based on DbWrench export. 2014-8-18 10pm PDT */ --- IF THIS DDL SCRIPT IS TO *CREATE* THE DATABASE ITSELF, --- WILL NEED TO FIRST KNOW THE DATABASE NAME AND ROLES TO BE USED. +-- IF THIS DDL SCRIPT IS TO *CREATE* THE DATABASE ITSELF, +-- WILL NEED TO FIRST KNOW THE DATABASE NAME AND ROLES TO BE USED. /* Add single base schema for all odm2 entities */ CREATE SCHEMA %(schema)s; @@ -48,14 +50,15 @@ post_block = """/* ** Set up samplingfeatures.featuregeometry as a heterogeneous, 2D PostGIS geom field. 
*/ ALTER TABLE %(schema)s.samplingfeatures ALTER COLUMN featuregeometry TYPE geometry; -ALTER TABLE %(schema)s.samplingfeatures ADD CONSTRAINT +ALTER TABLE %(schema)s.samplingfeatures ADD CONSTRAINT enforce_dims_featuregeometry CHECK (st_ndims(featuregeometry) = 2); CREATE INDEX idx_samplingfeature_featuregeom ON %(schema)s.samplingfeatures USING gist (featuregeometry); -- Populate and tweak geometry_columns SELECT Populate_Geometry_Columns(); -- But it assigned a POINT type to %(schema)s.samplingfeatures. Need instead to use the generic -- 'geometries', to accept any type (point, line, polygon, and collections thereof [multi-polygon, etc]) -UPDATE public.geometry_columns SET type = 'GEOMETRY' WHERE f_table_schema = '%(schema)s' AND f_table_name = 'samplingfeatures'; +UPDATE public.geometry_columns SET +type = 'GEOMETRY' WHERE f_table_schema = '%(schema)s' AND f_table_name = 'samplingfeatures'; """ % odm2infodct # Relies on these assumptions: @@ -78,10 +81,10 @@ # Insert pre and post blocks, and the modified DDL lines in between ddl_ppf = open(ddlppfile, 'w') ddl_ppf.write(pre_block) -ddl_ppf.write("/* ================================================================\n") -ddl_ppf.write(" ================================================================ */\n\n") +ddl_ppf.write('/* ================================================================\n') +ddl_ppf.write(' ================================================================ */\n\n') ddl_ppf.writelines(ddl_pp_lines) -ddl_ppf.write("\n/* ================================================================\n") -ddl_ppf.write(" ================================================================ */\n\n") +ddl_ppf.write('\n/* ================================================================\n') +ddl_ppf.write(' ================================================================ */\n\n') ddl_ppf.write(post_block) ddl_ppf.close() diff --git a/tests/schemas/postgresql/olderversions/__init__.py 
b/tests/schemas/postgresql/olderversions/__init__.py index e69de29..78f3bf2 100644 --- a/tests/schemas/postgresql/olderversions/__init__.py +++ b/tests/schemas/postgresql/olderversions/__init__.py @@ -0,0 +1 @@ +from __future__ import (absolute_import, division, print_function) diff --git a/tests/test_SessionFactory.py b/tests/test_SessionFactory.py index f128ace..adcb446 100644 --- a/tests/test_SessionFactory.py +++ b/tests/test_SessionFactory.py @@ -1,72 +1,48 @@ -__author__ = 'valentine' +from __future__ import (absolute_import, division, print_function) + +from odm2api.ODM2.models import CVElevationDatum, setSchema from odm2api.ODMconnection import SessionFactory -from odm2api.ODM2.models import * + import pytest -from sqlalchemy.engine import reflection -# assumes that pytest is being run from ODM2PythonAPI director -# [name, driver, connectionstring ] +__author__ = 'valentine' + + dbs_readonly = [ - # ['mysql', 'localhost', 'odm2', 'ODM', 'odm'], ['mysql:ODM@Localhost/', 'mysql', 'mysql+pymysql://ODM:odm@localhost/'], ['mysql"root@Localhost/', 'mysql', 'mysql+pymysql://root@localhost/'], ['mysql:ODM@Localhost/odm2', 'mysql', 'mysql+pymysql://ODM:odm@localhost/odm2'], ['mysql"root@Localhost/odm2', 'mysql', 'mysql+pymysql://root@localhost/odm2'], - # [' mysql + mysqldb:', 'mysql', 'mysql+mysqldb://root@localhost/odm2'], - #'mysql+pymysql://ODM:odm@127.0.0.1/odm2' - ['postgresql_marchantariats_none', 'postgresql', 'postgresql+psycopg2://postgres:None@localhost/marchantariats', 'marchantariats', 'postgres', None], - ['postgresql_marchantariats_empty', 'postgresql', 'postgresql+psycopg2://postgres@localhost/marchantariats', 'marchantariats', 'postgres', None], - #'postgresql+psycopg2://postgres:None@localhost/marchantariats' - - # ["mssql_pyodbc_azure", "mssql", "mssql+pyodbc:///?odbc_connect=DRIVER%3D%7BFreeTDS%7D%3BDSN%3Dazure%3BUID%3Dweb%3BPWD%3D1Forgetit%21%3B" , 'odm2', 'web', '1Forgetit!'], - # ["mssql_pyodbc2_azure", "mssql", 
"mssql+pyodbc:///?odbc_connect=DRIVER%3DFreeTDS%3BSERVERNAME%3Dnrb8xkgxaj.database.windows.net%3BUID%3Dweb@nrb8xkgxaj%3BPWD%3D1Forgetit!%3BDATABASE%3Dodm2", ], - # ["mssql_pyodbc3_azure", "mssql", "mssql+pyodbc:///?odbc_connect=DRIVER%3D{FreeTDS}%3BSERVERNAME%3Dnrb8xkgxaj.database.windows.net%3BUID%3Dweb%3DPWD%3D1Forgetit!%3BDATABASE%3Dodm2",], - #'mssql+pyodbc:///?odbc_connect=DRIVER%3D%7BFreeTDS%7D%3BDSN%3Dnrb8xkgxaj.database.windows.net%3BUID%3Dweb%3BPWD%3D1Forgetit%21%3B' - # ["mssql_pyodbc_moonstone", "mssql","mssql+pyodbc:///?odbc_connect=DRIVER%3D%7BFreeTDS%7D%3BDSN%3Dmoonstone%3BUID%3Dwebservice%3BPWD%3Dwebservice%21%3BDATABASE=odm2",], - # ["mssql_pyodbc2", "mssql", "mssql+pyodbc:///?odbc_connect=DRIVER={FreeTDS};SERVERNAME=moonstone.ucsd.edu;UID=webservice;PWD=webservice;DATABASE=odm2", ], - # ["pymssql_azre", "mssql", "mssql+pymssql://web@nrb8xkgxaj:1Forgetit!@kyle?charset=utf8", ], - # ["pymssql_azre2", "mssql", "mssql+pymssql://web:1Forgetit!@kyle?charset=utf8",], - # ["pymssql_moonstone", "mssql", "mssql+pymssql://webservice:webservice@moonstone?charset=utf8"], - # ["mssql", "localhost", 'odm2', 'odm', 'odm'], - # ["sqlite", "./tests/spatialite/odm2_test.sqlite", None, None, None], - ["sqlite_wof", "sqlite","sqlite:///./tests/spatialite/wof2odm/ODM2.sqlite", None, None, None] - #'sqlite:///./tests/spatialite/wof2odm/ODM2.sqlite' + ['postgresql_marchantariats_none', 'postgresql', + 'postgresql+psycopg2://postgres:None@localhost/marchantariats', + 'marchantariats', 'postgres', None], + ['postgresql_marchantariats_empty', 'postgresql', + 'postgresql+psycopg2://postgres@localhost/marchantariats', + 'marchantariats', 'postgres', None], + ['sqlite_wof', 'sqlite', 'sqlite:///./tests/spatialite/wof2odm/ODM2.sqlite', None, None, None] ] -dbs_test = [ - ["sqlite_test","sqlite" "./tests/spatialite/odm2_test.sqlite", None, None, None] +dbs_test = [ + ['sqlite_test', 'sqlite' './tests/spatialite/odm2_test.sqlite', None, None, None] ] + + class aSessionFactory: def 
__init__(self, request): - #session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm') db = request.param - print ("dbtype", db[0], db[1] ) - #session_factory = dbconnection.createConnection(db[0],db[1],db[2],db[3],db[4], echo=True) + print ('dbtype', db[0], db[1]) session_factory = SessionFactory(db[2]) setSchema(session_factory.engine) - assert session_factory is not None, ("failed to create a session for ", db[0], db[1]) -# assert session_factory.engine is not None, ("failed: session has no engine ", db[0], db[1]) -# - # insp = reflection.Inspector.from_engine(session_factory.engine) -# tables = insp.get_table_names() - + assert session_factory is not None, ('failed to create a session for ', db[0], db[1]) self.session = session_factory.getSession() -# -# params=["sqlite+pysqlite:///../../ODM2PythonAPI/tests/spatialite/odm2_test.sqlite", "mail.python.org"]) -@pytest.fixture(scope="session", params = dbs_readonly) +@pytest.fixture(scope='session', params=dbs_readonly) def setup(request): return aSessionFactory(request) -#connect to all 4 database types( mssql, mysql, postgresql, sqlite, mssql on mac) def test_aSessionFactory(setup): - - q= setup.session.query(CVElevationDatum) - results= q.all() - #print results + q = setup.session.query(CVElevationDatum) + results = q.all() assert len(results) > 0 - - - diff --git a/tests/test_connection.py b/tests/test_connection.py index 41c44b2..e233eff 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,65 +1,52 @@ -__author__ = 'valentine' +from __future__ import (absolute_import, division, print_function) + +from odm2api.ODM2.models import CVElevationDatum from odm2api.ODMconnection import dbconnection -from odm2api.ODM2.models import * import pytest from sqlalchemy.engine import reflection -# assumes that pytest is being run from ODM2PythonAPI director -# [ name, driver, server, database, user, password ] + +__author__ = 'valentine' + dbs_readonly = [ - # ['mysql', 
'localhost', 'odm2', 'ODM', 'odm'], ['mysql_odm2_odm', 'mysql', 'localhost', 'odm2', 'ODM', 'odm'], - ['mysql_odm2_root','mysql', 'localhost', 'odm2', 'root', None], - ['postgresql_marchantariats','postgresql', 'localhost', 'marchantariats', 'postgres', 'iforget'], - -# bet the @ is scrwing thing up - # ["mssql", "nrb8xkgxaj.database.windows.net" , 'odm2', 'web@nrb8xkgxaj', '1Forgetit!'], - # ["mssql_azure", "mssql", "azure", 'odm2', 'web@nrb8xkgxaj', '1Forgetit!'], -# ["mssql", "localhost", 'odm2', 'odm', 'odm'], - # ["sqlite", "./tests/spatialite/odm2_test.sqlite", None, None, None], - ["sqlite_wof","sqlite", "./tests/spatialite/wof2odm/ODM2.sqlite", None, None, None] + ['mysql_odm2_root', 'mysql', 'localhost', 'odm2', 'root', None], + ['postgresql_marchantariats', 'postgresql', 'localhost', 'marchantariats', 'postgres', 'iforget'], + ['sqlite_wof', 'sqlite', './tests/spatialite/wof2odm/ODM2.sqlite', None, None, None] ] -dbs_test = [ - # ["sqlite_local","sqlite", "./tests/spatialite/odm2_test.sqlite", None, None, None], - ["sqlite_memory", "sqlite", ":memory:", None, None, None] +dbs_test = [ + ['sqlite_memory', 'sqlite', ':memory:', None, None, None] ] + + class Connection: def __init__(self, request): - #session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm') db = request.param - print ("dbtype", db[0], db[1] ) - session_factory = dbconnection.createConnection(db[1],db[2],db[3],db[4],db[5], echo=True) - assert session_factory is not None, ("failed to create a session for ", db[0], db[1]) - assert session_factory.engine is not None, ("failed: session has no engine ", db[0], db[1]) + print ('dbtype', db[0], db[1]) + session_factory = dbconnection.createConnection(db[1], db[2], db[3], db[4], db[5], echo=True) + assert session_factory is not None, ('failed to create a session for ', db[0], db[1]) + assert session_factory.engine is not None, ('failed: session has no engine ', db[0], db[1]) insp = 
reflection.Inspector.from_engine(session_factory.engine) - tables = insp.get_table_names() + insp.get_table_names() self.session = session_factory.getSession() - # self.session = session_factory.test_Session() -# -# params=["sqlite+pysqlite:///../../ODM2PythonAPI/tests/spatialite/odm2_test.sqlite", "mail.python.org"]) -@pytest.fixture(scope="session", params = dbs_readonly) +@pytest.fixture(scope='session', params=dbs_readonly) def setup(request): return Connection(request) -#connect to all 4 database types( mssql, mysql, postgresql, sqlite, mssql on mac) def test_connection(setup): - - q= setup.session.query(CVElevationDatum) - results= q.all() - #print results + q = setup.session.query(CVElevationDatum) + results = q.all() assert len(results) > 0 def test_create_all_schema(): pass + def test_create_all_no_schema(): pass - - - diff --git a/tests/test_example.py b/tests/test_example.py deleted file mode 100644 index 7525f4d..0000000 --- a/tests/test_example.py +++ /dev/null @@ -1,88 +0,0 @@ -# run with 'py.test -s test_example.py' - -def multiply(x, y): - return x * y - - -############ -# Fixtures # -############ -def setup_module(module): - print "setup_module module:%s" % module.__name__ - - -def teardown_module(module): - print "teardown_module module:%s" % module.__name__ - - -def setup_function(function): - print "setup_function function:%s" % function.__name__ - - -def teardown_function(function): - print "teardown_function function:%s" % function.__name__ - - -######### -# Tests # -######### - - -def test_numbers_3_4(): - assert multiply(3, 4) == 12 - - -def test_strings_a_3(): - assert multiply('a', 3) == 'aaa' - - -############ -# Funcargs # -############ -def pytest_funcarg__myfuncarg(request): - return 42 - - -def test_function(myfuncarg): - assert myfuncarg == 42 - - -def pytest_generate_tests(metafunc): - if "numiter" in metafunc.funcargnames: - metafunc.parametrize("numiter", range(10)) - - -def test_func(numiter): - assert numiter < 10 - - 
-############## -# Test Class # -############## -class TestMult: - def setup(self): - print "setup class:TestStuff" - - def teardown(self): - print "teardown class:TestStuff" - - def setup_class(cls): - print "setup_class class:%s" % cls.__name__ - - def teardown_class(cls): - print "teardown_class class:%s" % cls.__name__ - - def setup_method(self, method): - print "setup_method method:%s" % method.__name__ - - def teardown_method(self, method): - print "teardown_method method:%s" % method.__name__ - - - def test_numbers_5_6(self): - print 'test_numbers_5_6 <================== actual test code' - assert multiply(5, 6) == 30 - - def test_string_b_2(self): - print 'test_string_b_2 <================== actual test code' - assert multiply('b', 2) == 'bb' \ No newline at end of file diff --git a/tests/test_odm2/__init__.py b/tests/test_odm2/__init__.py index a24dbc6..223be85 100644 --- a/tests/test_odm2/__init__.py +++ b/tests/test_odm2/__init__.py @@ -1 +1,3 @@ +from __future__ import (absolute_import, division, print_function) + __author__ = 'stephanie' diff --git a/tests/test_odm2/test_createservice.py b/tests/test_odm2/test_createservice.py index ad876b6..cff0466 100644 --- a/tests/test_odm2/test_createservice.py +++ b/tests/test_odm2/test_createservice.py @@ -1,268 +1,249 @@ -import pytest +from __future__ import (absolute_import, division, print_function) + import datetime -from os.path import * +import uuid +from os.path import abspath, dirname, join + from odm2api.ODM2 import models -from odm2api.ODMconnection import dbconnection from odm2api.ODM2.services.createService import CreateODM2 -import uuid +from odm2api.ODMconnection import dbconnection + +import pytest # run this test from the root directory using: # python -m pytest tests/test_odm2/test_createservice.py -globals = {} +globals_vars = {} + class TestCreateService: - @pytest.fixture(scope="class", autouse=True) + @pytest.fixture(scope='class', autouse=True) def build_db(self): """ Builds an empty 
sqlite (in-memory) database for testing :return: None + """ # path to the ddl script for building the database - ddlpath= abspath(join(dirname(__file__), 'data/empty.sql')) + ddlpath = abspath(join(dirname(__file__), 'data/empty.sql')) # create and empty sqlite database for testing db = dbconnection.createConnection('sqlite', ':memory:') # read the ddl script and remove the first (BEGIN TRANSACTION) and last (COMMIT) lines ddl = open(ddlpath, 'r').read() - ddl = ddl.replace('BEGIN TRANSACTION;','') - ddl = ddl.replace('COMMIT;','') + ddl = ddl.replace('BEGIN TRANSACTION;', '') + ddl = ddl.replace('COMMIT;', '') # execute each statement to build the odm2 database for line in ddl.split(');')[:-1]: try: db.engine.execute(line + ');') except Exception as e: - print e + print(e) self.write = CreateODM2(db) - self.engine= db.engine + self.engine = db.engine - globals['write'] = self.write - globals['engine'] = self.engine - globals['db'] = db - # return self.write, self.engine + globals_vars['write'] = self.write + globals_vars['engine'] = self.engine + globals_vars['db'] = db def setup(self): - - self.writer = globals['write'] - self.engine = globals['engine'] - self.db = globals['db'] + self.writer = globals_vars['write'] + self.engine = globals_vars['engine'] + self.db = globals_vars['db'] def test_createVariable(self): - # assert that there are no variables in the database res = self.engine.execute('SELECT * from Variables') assert(len(res.fetchall()) == 0) - # create a new variable code = 'MyVar' name = 'My Test Variable' vType = 'Hydrology' nodv = -9999 - speciation="mg/L as PO4" - definition="This is a test variable" - v = models.Variables(VariableCode = code, VariableNameCV=name, VariableTypeCV=vType, NoDataValue= nodv, SpeciationCV = None, - VariableDefinition=None) - # self.writer.createVariable(code = code,name = name,vType = vType,nodv =nodv,speciation=None,definition=None) - val=self.writer.createVariable(v) - # assert that this dataset has been successfully 
inserted - res = self.engine.execute('SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC').first() + speciation = 'mg/L as PO4' + definition = 'This is a test variable' + v = models.Variables( + VariableCode=code, + VariableNameCV=name, + VariableTypeCV=vType, + NoDataValue=nodv, + SpeciationCV=None, + VariableDefinition=None + ) + val = self.writer.createVariable(v) + res = self.engine.execute( + 'SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC' + ).first() assert(res is not None) assert(res[0] == val.VariableID) - assert(res[1] == vType ) # vType - assert(res[2] == code ) # code - assert(res[3] == name ) # name - assert(res[4] == None) # definition - assert(res[5] == None) # speciation - assert(res[6] == nodv ) # nodata - - v = models.Variables(VariableCode = code, VariableNameCV=name, VariableTypeCV=vType, NoDataValue= nodv, SpeciationCV = speciation, - VariableDefinition=None) - # self.writer.createVariable(code = code,name = name,vType = vType,nodv =nodv,speciation=speciation,definition=None) - val=self.writer.createVariable(v) - + assert(res[1] == vType) # vType + assert(res[2] == code) # code + assert(res[3] == name) # name + assert(res[4] is None) # definition + assert(res[5] is None) # speciation + assert(res[6] == nodv) # nodata + + v = models.Variables( + VariableCode=code, + VariableNameCV=name, + VariableTypeCV=vType, + NoDataValue=nodv, + SpeciationCV=speciation, + VariableDefinition=None + ) + val = self.writer.createVariable(v) # assert that this dataset has been successfully inserted - res = self.engine.execute('SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC').first() + res = self.engine.execute( + 'SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC' + ).first() assert(res is not None) assert(res[0] == val.VariableID) - assert(res[1] == vType ) # vType - assert(res[2] == code ) # code - assert(res[3] == name ) # name - assert(res[4] == 
None) # definition - assert(res[5] == speciation) # speciation - assert(res[6] == nodv ) # nodata - - v = models.Variables(VariableCode = code, VariableNameCV=name, VariableTypeCV=vType, NoDataValue= nodv, SpeciationCV = None, - VariableDefinition=definition) - # self.writer.createVariable(code = code,name = name,vType = vType,nodv =nodv,speciation=None,definition=definition) - val=self.writer.createVariable(v) - + assert(res[1] == vType) # vType + assert(res[2] == code) # code + assert(res[3] == name) # name + assert(res[4] is None) # definition + assert(res[5] == speciation) # speciation + assert(res[6] == nodv) # nodata + + v = models.Variables( + VariableCode=code, + VariableNameCV=name, + VariableTypeCV=vType, + NoDataValue=nodv, + SpeciationCV=None, + VariableDefinition=definition + ) + val = self.writer.createVariable(v) # assert that this dataset has been successfully inserted - res = self.engine.execute('SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC').first() + res = self.engine.execute( + 'SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC' + ).first() assert(res is not None) assert(res[0] == val.VariableID) - assert(res[1] == vType ) # vType - assert(res[2] == code ) # code - assert(res[3] == name ) # name - assert(res[4] == definition) # definition - assert(res[5] == None) # speciation - assert(res[6] == nodv ) # nodata - - - v = models.Variables(VariableCode = code, VariableNameCV=name, VariableTypeCV=vType, NoDataValue= nodv, SpeciationCV = speciation, - VariableDefinition=definition) - # self.writer.createVariable(code = code,name = name,vType = vType,nodv =nodv,speciation=speciation,definition=definition) - val= self.writer.createVariable(v) + assert(res[1] == vType) # vType + assert(res[2] == code) # code + assert(res[3] == name) # name + assert(res[4] == definition) # definition + assert(res[5] is None) # speciation + assert(res[6] == nodv) # nodata + + v = models.Variables( + 
VariableCode=code, + VariableNameCV=name, + VariableTypeCV=vType, + NoDataValue=nodv, + SpeciationCV=speciation, + VariableDefinition=definition + ) + val = self.writer.createVariable(v) - # assert that this dataset has been successfully inserted - res = self.engine.execute('SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC').first() + res = self.engine.execute( + 'SELECT * from Variables WHERE VariableCode = "MyVar" ORDER BY VariableID DESC' + ).first() assert(res is not None) assert(res[0] == val.VariableID) - assert(res[1] == vType ) # vType - assert(res[2] == code ) # code - assert(res[3] == name ) # name - assert(res[4] == definition) # definition - assert(res[5] == speciation) # speciation - assert(res[6] == nodv ) # nodata - - @pytest.mark.skipif(True, reason="implement") + assert(res[1] == vType) # vType + assert(res[2] == code) # code + assert(res[3] == name) # name + assert(res[4] == definition) # definition + assert(res[5] == speciation) # speciation + assert(res[6] == nodv) # nodata + + @pytest.mark.skipif(True, reason='implement') def test_createMethod(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createProcessingLevel(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createSamplingFeature(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createUnit(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createOrganization(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createPerson(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createAffiliation(self): pass def test_createDataset(self): - type = "Generic" - code = 
"MyNewDataset" - title= "Just a test dataset" - desc = "this record represents a test dataset" + dataset_type_cv = 'Generic' + code = 'MyNewDataset' + title = 'Just a test dataset' + desc = 'this record represents a test dataset' - # assert that there are no datasets in the database res = self.engine.execute('SELECT * from DataSets') assert(len(res.fetchall()) == 0) # create a new dataset - # dataset = self.writer.createDataset(dstype=type, - # dscode=code, - # dstitle=title, - # dsabstract=desc) - - d = models.DataSets(DataSetTypeCV = type, DataSetCode =code, DataSetTitle=title, DataSetAbstract = desc, DataSetUUID = uuid.uuid4().hex) + d = models.DataSets( + DataSetTypeCV=dataset_type_cv, + DataSetCode=code, + DataSetTitle=title, + DataSetAbstract=desc, + DataSetUUID=uuid.uuid4().hex + ) dataset = self.writer.createDataset(d) assert(dataset == d) - assert (dataset.DataSetID ==1) + assert (dataset.DataSetID == 1) # assert that this dataset has been successfully inserted res = self.engine.execute('SELECT * from DataSets').fetchall() assert(len(res) == 1) - assert(res[0][0]==dataset.DataSetID) + assert(res[0][0] == dataset.DataSetID) - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createDatasetResults(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createAction(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createActionBy(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createFeatureAction(self): pass - # def test_createResult(self): - # - # # assert that there are no results - # res = self.engine.execute('SELECT * FROM Results') - # assert(len(res.fetchall()) == 0) - # - # # create a result record - # # self.writer.createResult(featureactionid = 1, - # # variableid = 1, - # # unitid = 1, - # # 
processinglevelid = 1, - # # valuecount = 0, - # # sampledmedium = 'unknown', - # # resulttypecv = 'time series', - # # taxonomicclass=None, resultdatetime=None, resultdatetimeutcoffset=None, - # # validdatetime=None, validdatetimeutcoffset=None, statuscv=None) - # - # r = models.Results(FeatureActionID = 1, - # VariableID=1, - # UnitsID =1, - # ProcessingLevelID = 1, - # ValueCount = 0, - # SampledMediumCV = 'unknown', - # ResultTypeCV = 'time series', - # TaxonomicClassifierID = None, - # ResultDateTime = None, - # ResultDateTimeUTCOffset = None, - # ValidDateTime=None, - # ValidDateTimeUTCOffset = None, - # StatusCV = None, - # ResultUUID = uuid.uuid4().hex - # - # ) - # self.writer.createResult(r) - # - # - # # assert that there are results - # res = self.engine.execute('SELECT * FROM Results') - # assert(len(res.fetchall()) == 1) - def test_createTimeSeriesResult(self): - # assert that there are no time series results in the database res = self.engine.execute('SELECT * FROM TimeSeriesResults').first() assert(res is None) - # create most basic time series result record possible - r = models.TimeSeriesResults(FeatureActionID= 1, - VariableID=1, - UnitsID =1, - ProcessingLevelID = 1, - ValueCount = 0, - SampledMediumCV = 'unknown', - ResultTypeCV = 'time series', - ResultUUID = str(uuid.uuid4()), - AggregationStatisticCV = 'unknown' - + r = models.TimeSeriesResults( + FeatureActionID=1, + VariableID=1, + UnitsID=1, + ProcessingLevelID=1, + ValueCount=0, + SampledMediumCV='unknown', + ResultTypeCV='time series', + ResultUUID=str(uuid.uuid4()), + AggregationStatisticCV='unknown' ) - - newres=self.writer.createResult(r) - + newres = self.writer.createResult(r) # assert that this basic tsr exists in the database tsr = self.engine.execute('SELECT * FROM TimeSeriesResults').first() assert(tsr is not None) @@ -271,76 +252,59 @@ def test_createTimeSeriesResult(self): result = self.engine.execute('SELECT * FROM Results').first() assert(result is not None) - 
assert(newres.ResultID ==1) - assert(result[0] ==newres.ResultID) - - - + assert(newres.ResultID == 1) + assert(result[0] == newres.ResultID) - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createTimeSeriesResultValues(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createSite(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createSpatialReference(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createDeploymentAction(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createModel(self): pass - @pytest.mark.skipif(True, reason="implement") + @pytest.mark.skipif(True, reason='implement') def test_createRelatedModel(self): pass - - - def test_createSimulation(self): + def test_createSimulation(self): # todo: insert should fail if unitID or actionID do not exist - # assert that there are no datasets in the database res = self.engine.execute('SELECT * from Simulations') assert(len(res.fetchall()) == 0) # create a new simulation - st = datetime.datetime(2016,1,1) - et = datetime.datetime(2016,1,25) - # sim = self.writer.createSimulation( actionid = 1, - # modelID=1, - # simulationName= 'MySimulation', - # simulationDescription = 'My simulation description', - # simulationStartDateTime = st, - # simulationStartOffset = 6, - # simulationEndDateTime = et, - # simulationEndOffset = 6, - # timeStepValue = 1, - # timeStepUnitID = 1, - # inputDatasetID=None) - s = models.Simulations(ActionID = 1, - SimulationName ="MySimulation", - SimulationDescription = "My simulation description", - SimulationStartDateTime = st, - SimulationStartDateTimeUTCOffset=6, - SimulationEndDateTime=et, - SimulationEndDateTimeUTCOffset=6, - TimeStepValue=1, - TimeStepUnitsID=1, - 
InputDataSetID=None, - ModelID = 1 - ) + st = datetime.datetime(2016, 1, 1) + et = datetime.datetime(2016, 1, 25) + s = models.Simulations( + ActionID=1, + SimulationName='MySimulation', + SimulationDescription='My simulation description', + SimulationStartDateTime=st, + SimulationStartDateTimeUTCOffset=6, + SimulationEndDateTime=et, + SimulationEndDateTimeUTCOffset=6, + TimeStepValue=1, + TimeStepUnitsID=1, + InputDataSetID=None, + ModelID=1 + ) sim = self.writer.createSimulation(s) assert (s == sim) assert (s.SimulationID == 1) # assert that this record has been successfully inserted res = self.engine.execute('SELECT * from Simulations').fetchall() assert(len(res) == 1) - assert(res[0][0]==s.SimulationID) - + assert(res[0][0] == s.SimulationID) diff --git a/tests/test_odm2/test_model.py b/tests/test_odm2/test_model.py index 3ce7e3d..82c6283 100644 --- a/tests/test_odm2/test_model.py +++ b/tests/test_odm2/test_model.py @@ -1,68 +1,42 @@ -__author__ = 'stephanie' -# run with 'py.test -s test_example.py' -from odm2api.ODMconnection import dbconnection -from odm2api.ODM2.models import * -from .. import test_connection as testConnection +from __future__ import (absolute_import, division, print_function) + +from odm2api.ODM2.models import (CVElevationDatum, CVSamplingFeatureGeoType, + CVSamplingFeatureType, SamplingFeatures) + import pytest -# class Connection: -# def __init__(self, request): -# #session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm') -# db = request.param -# session_factory = dbconnection.createConnection(db[0],db[1],db[2],db[3],) -# self.session = session_factory.getSession() +from .. 
import test_connection as testConnection + +__author__ = 'stephanie' -# assumes that pytest is being run from ODM2PythonAPI directory dbs = testConnection.dbs_readonly -# dbs = [ -# # ['mysql', 'localhost', 'odm2', 'ODM', 'odm'], -# # ["sqlite", "./tests/spatialite/odm2_test.sqlite",None, None] -# ["sqlite", "../odm2_test.sqlite",None, None, None] -# ] -# -# params=["sqlite+pysqlite:///../../ODM2PythonAPI/tests/spatialite/odm2_test.sqlite", "mail.python.org"]) -@pytest.fixture(scope="session", params = dbs) + +@pytest.fixture(scope='session', params=dbs) def setup(request): return testConnection.Connection(request) - # #session_factory = dbconnection.createConnection('mysql', 'localhost', 'odm2', 'ODM', 'odm') - # db = request.param - # session_factory = dbconnection.createConnection(db[0],db[1],db[2],db[3],) - # self.session = session_factory.getSession() -############ -# Fixtures # -############ -#class TestODM2: -# @pytest.fixture(autouse=True) +# Fixtures def test_cvelevationdatum(setup): - q= setup.session.query(CVElevationDatum) - results= q.all() - #print results + q = setup.session.query(CVElevationDatum) + results = q.all() assert len(results) > 0 + def test_cvsamplingfeatuergeotype(setup): - q=setup.session.query(CVSamplingFeatureGeoType) + q = setup.session.query(CVSamplingFeatureGeoType) results = q.all() - #print results assert len(results) > 0 + def test_cvsamplingfeaturetype(setup): q = setup.session.query(CVSamplingFeatureType) results = q.all() - #print results assert len(results) > 0 + def test_sampling_feature(setup): q = setup.session.query(SamplingFeatures) results = q.all() - ''' - for r in results: - print r - print r.SamplingFeatureGeotypeCV - print r.FeatureGeometry - #print results - ''' assert len(results) > 0 - diff --git a/tests/test_odm2/test_odm2.py b/tests/test_odm2/test_odm2.py index da8aab3..a50f470 100644 --- a/tests/test_odm2/test_odm2.py +++ b/tests/test_odm2/test_odm2.py @@ -1,52 +1,40 @@ from __future__ import 
(absolute_import, division, print_function) -#import unittest - -from odm2api.ODMconnection import dbconnection -from odm2api.ODM2.services.readService import ReadODM2 +from odm2api.ODM2.models import Methods, Models, People, ProcessingLevels, RelatedModels, Variables from odm2api.ODM2.services.createService import CreateODM2 -from odm2api.ODM2.services.updateService import UpdateODM2 from odm2api.ODM2.services.deleteService import DeleteODM2 -from odm2api.ODM2.models import (People, - Variables, - Methods, - ProcessingLevels, - Models, - RelatedModels) +from odm2api.ODM2.services.readService import ReadODM2 +from odm2api.ODM2.services.updateService import UpdateODM2 +from odm2api.ODMconnection import dbconnection -from tests import test_connection as testConnection import pytest +from tests import test_connection as testConnection + __author__ = ['tony castronova', 'david valentine'] xfail = pytest.mark.xfail skipif = xfail = pytest.mark.skipif -#from pytest import raises use pytest.raises() dbs = testConnection.dbs_test -# @pytest.fixture(scope="session", params = dbs) -# def setup(request): -# return testConnection.Connection(request) class odmConnection(): pass -#class test_sqlite(unittest.TestCase): -# class Testsqlite(): -@pytest.fixture(scope="function", params=dbs) -#@classmethod -def setup( request): + +@pytest.fixture(scope='function', params=dbs) +def setup(request): # build an empty database for testing # conn = dbconnection.createConnection('sqlite', ':memory:') db = request.param - print("dbtype", db[0], db[1]) + print('dbtype', db[0], db[1]) session_factory = dbconnection.createConnection(db[1], db[2], db[3], db[4], db[5], echo=False) - assert session_factory is not None, ("failed to create a session for ", db[0], db[1]) - assert session_factory.engine is not None, ("failed: session has no engine ", db[0], db[1]) + assert session_factory is not None, ('failed to create a session for ', db[0], db[1]) + assert session_factory.engine is not None, 
('failed: session has no engine ', db[0], db[1]) # dbconnection._setSchema(conn.engine) dbConn = odmConnection # build connectors for read, write, update, and delete operations @@ -55,19 +43,15 @@ def setup( request): dbConn.odmupdate = UpdateODM2(session_factory) dbConn.odmdelete = DeleteODM2(session_factory) s = session_factory.getSession() - # initialize the in-memory database, loop through each command (skip first and last lines) - #build = open('./tests/spatialite/build_empty.sqlite').read() if (db[2] == ':memory:'): build = open('./tests/schemas/sqlite/ODM2_for_SQLite.sql').read() for line in build.split(';\n'): s.execute(line) s.flush() - # s.invalidate() - print('database initialization completed successfully') def fin(): - print("teardown odm2 test connection") + print('teardown odm2 test connection') del dbConn.odmread del dbConn.odmcreate del dbConn.odmupdate @@ -80,14 +64,14 @@ def fin(): return dbConn -@pytest.mark.skipif(True, reason="Enable for testing: CreateService Session closes on failed create #52") -def test_SessionNotFailed(setup): - # goal of this is to see that if we force errors like a null value, or duplicate that the session does not fail - # create some people - setup.odmcreate.createPerson(firstName="tony", - lastName='castronova', - middleName='michael') +@pytest.mark.skipif(True, reason='Enable for testing: CreateService Session closes on failed create #52') +def test_SessionNotFailed(setup): + setup.odmcreate.createPerson( + firstName='tony', + lastName='castronova', + middleName='michael' + ) with pytest.raises(Exception) as excinfo: # this one should fail due to a not null constraint @@ -98,66 +82,39 @@ def test_SessionNotFailed(setup): assert 'NULL' in str(excinfo.value) # now add again - setup.odmcreate.createPerson(firstName="tony", + setup.odmcreate.createPerson(firstName='tony', lastName='castronova', middleName=None) - - # with pytest.raises(Exception) as excinfo: - # # duplicate - # 
setup.odmcreate.createPerson(firstName="tony", - # lastName='castronova', - # middleName='michael') - # - # assert 'People.PersonFirstName may not be NULL' in str(excinfo.value) - - setup.odmcreate.createPerson(firstName="john", - lastName='doe') - + setup.odmcreate.createPerson( + firstName='john', + lastName='doe' + ) people = setup.odmread.getPeople() - assert len(people) == 3, "People should have been 3" + assert len(people) == 3, 'People should have been 3' -# @classmethod -# def tearDownClass(self): -# del self.odmread -# del self.odmcreate -# del self.odmupdate -# del self.odmdelete def test_createPerson(setup): # create some people - p1 = People(PersonFirstName="tony", PersonLastName='castronova', PersonMiddleName='Michael') - p2 = People(PersonFirstName="tony", PersonLastName='castronova') - p3 = People(PersonFirstName="john", PersonLastName='doe') + p1 = People(PersonFirstName='tony', PersonLastName='castronova', PersonMiddleName='Michael') + p2 = People(PersonFirstName='tony', PersonLastName='castronova') + p3 = People(PersonFirstName='john', PersonLastName='doe') setup.odmcreate.createPerson(p1) setup.odmcreate.createPerson(p2) setup.odmcreate.createPerson(p3) - # setup.odmcreate.createPerson(firstName="tony", - # lastName='castronova', - # middleName='michael') - # - # setup.odmcreate.createPerson(firstName="tony", - # lastName='castronova', - # middleName=None) - # setup.odmcreate.createPerson(firstName="john", - # lastName='doe') - people = setup.odmread.getPeople() - assert len(people) == 3, "People should have been 3" + assert len(people) == 3, 'People should have been 3' + def test_personFail(setup): - with pytest.raises(Exception) as excinfo: + with pytest.raises(Exception) as excinfo: # this one should fail due to a not null constraint + p1 = People(PersonFirstName=None, PersonLastName='doe', PersonMiddleName='john') + setup.odmcreate.createPerson(p1) + assert 'null' in str(excinfo.value).lower() - # setup.odmcreate.createPerson(firstName=None, 
- # lastName='castronova', - # middleName='michael') - p1 = People(PersonFirstName=None, PersonLastName='doe', PersonMiddleName='john') - setup.odmcreate.createPerson(p1) - - assert 'null' in str(excinfo.value).lower() def test_createVariable(setup): v1 = Variables(VariableCode='Phos_TOT', @@ -190,12 +147,14 @@ def test_createVariable(setup): with pytest.raises(Exception) as excinfo: # insert duplicate setup.odmcreate.createVariable( - Variables(VariableCode='Phos_TOT', - VariableNameCV='Phosphorus, total dissolved', - VariableTypeCV='Hydrology', - NoDataValue=-999, - SpeciationCV=None, - VariableDefinition=None) + Variables( + VariableCode='Phos_TOT', + VariableNameCV='Phosphorus, total dissolved', + VariableTypeCV='Hydrology', + NoDataValue=-999, + SpeciationCV=None, + VariableDefinition=None + ) ) assert 'unique' in str(excinfo.value).lower() @@ -224,7 +183,6 @@ def test_createMethod(setup): setup.odmcreate.createMethod(m2) setup.odmcreate.createMethod(m3) methods = setup.odmread.getMethods() - assert len(methods) == 3 @@ -234,93 +192,99 @@ def test_ProcessingLevel(setup): Explanation=None) setup.odmcreate.createProcessingLevel(pl) res = setup.odmread.getProcessingLevels() - assert len(res) == 1 -@skipif(True, reason="Needs data") -def test_createSamplingFeature(setup): - +@skipif(True, reason='Needs data') +def test_createSamplingFeature(setup): res = setup.odmread.getSamplingFeatures() - assert len(res) == 1 -@skipif(True, reason="Needs data") -def test_createUnit(setup): - res = setup.odmread.getUnits() +@skipif(True, reason='Needs data') +def test_createUnit(setup): + res = setup.odmread.getUnits() assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createOrganization(setup): res = setup.odmread.getOrganizations() - assert len(res) == 1 -@skipif(True, reason="Needs data") +@skipif(True, reason='Needs data') def test_createAffiliation(setup): res = setup.odmread.getAffiliationsByPerson() - assert len(res) 
== 1 -@skipif(True, reason="Needs data") + +@skipif(True, reason='Needs data') def test_createDataset(setup): res = setup.odmread.getDataSets() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createDatasetResults(setup): res = setup.odmread.getProcessingLevels() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createAction(setup): # todo: this function is missing # res = self.odmread.getActions() - assert 0 == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createActionBy(setup): # todo; this function is missing # res = self.odmread.getActionsBy() - assert 0 == 1 -@skipif(True, reason="Needs data") -def test_createFeatureAction(setup): + +@skipif(True, reason='Needs data') +def test_createFeatureAction(setup): # todo: this function is missing # res = self.odmread.getFeatureActions() - assert 0 == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createResult(setup): res = setup.odmread.getResults() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createTimeSeriesResult(setup): res = setup.odmread.getTimeSeriesResults() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createTimeSeriesResultValues(setup): res = setup.odmread.getTimeSeriesResultValues() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createSite(setup): res = setup.odmread.getAllSites() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createSpatialReference(setup): res = setup.odmread.getSpatialReferenceByCode() - assert len(res) == 1 -@skipif(True, reason="Needs data") + + +@skipif(True, reason='Needs data') def test_createDeploymentAction(setup): res = 
setup.odmread.getAllDeploymentAction() - assert len(res) == 1 @@ -340,9 +304,7 @@ def test_createModel(setup): # create with no description (expected: record inserted) setup.odmcreate.createModel(mod2) - res = setup.odmread.getModels() - assert len(res) == 2 res = setup.odmread.getModels(codes=['converter']) @@ -358,7 +320,8 @@ def test_createModel(setup): def test_createRelatedModel(setup): # create a relationship type setup.odmcreate.getSession().execute( - "insert into cv_relationshiptype values ('coupled', 'coupled converter', 'models that have been coupled together', 'modeling', NULL)") + 'insert into cv_relationshiptype values ("coupled", "coupled converter", "models that have been coupled together", "modeling", NULL)' # noqa + ) mod1 = Models(ModelCode='converter', ModelName='mymodel', ModelDescription='my test converter') @@ -390,8 +353,8 @@ def test_createRelatedModel(setup): m2rel = setup.odmread.getRelatedModels(code='model2') assert len(m2rel) == 0 -@skipif(True, reason="Needs data") + +@skipif(True, reason='Needs data') def test_createSimulation(setup): res = setup.odmread.getAllSimulations() - assert len(res) == 1 diff --git a/tests/test_odm2/test_readservice.py b/tests/test_odm2/test_readservice.py index ec9d158..6ea9154 100644 --- a/tests/test_odm2/test_readservice.py +++ b/tests/test_odm2/test_readservice.py @@ -1,18 +1,18 @@ from __future__ import (absolute_import, division, print_function) -import pytest -import datetime -from os.path import * +from os.path import abspath, dirname, join + from odm2api.ODM2 import models -from odm2api.ODMconnection import dbconnection from odm2api.ODM2.services.readService import ReadODM2 -from sqlalchemy.orm import class_mapper +from odm2api.ODMconnection import dbconnection + +import pytest + import sqlalchemy +from sqlalchemy.orm import class_mapper -# run this test from the root directory using: -# python -m pytest tests/test_odm2/test_readservice.py -globals = {} +globals_vars = {} def 
rawSql2Alchemy(rawsqlresult, sqlalchemyClass):
@@ -21,40 +21,40 @@ def rawSql2Alchemy(rawsqlresult, sqlalchemyClass):
     :param rawsqlresult: array of values, sql select results
     :param sqlalchemyModelObj: converter object to convert into
     :return: populated converter object
-    """
-    map = {}
-    class_attributes = [prop.key for prop in class_mapper(sqlalchemyClass).iterate_properties
-                        if isinstance(prop, sqlalchemy.orm.ColumnProperty)]
+    """
+    m = {}
+    class_attributes = [
+        prop.key for prop in class_mapper(sqlalchemyClass).iterate_properties
+        if isinstance(prop, sqlalchemy.orm.ColumnProperty)
+    ]
     for i in range(len(class_attributes)):
-        map[class_attributes[i]] = rawsqlresult[i]
+        m[class_attributes[i]] = rawsqlresult[i]
     modelObj = sqlalchemyClass()
-    modelObj.__dict__ = map
+    modelObj.__dict__ = m
     return modelObj
-
 class TestReadService:
-
-    @pytest.fixture(scope="class", autouse=True)
+    @pytest.fixture(scope='class', autouse=True)
     def build_db(self):
         """
         Builds a populated sqlite (in-memory) database for testing
         :return: None
-        """
+        """
         # path to the ddl script for building the database
-        ddlpath= abspath(join(dirname(__file__), 'data/populated.sql'))
+        ddlpath = abspath(join(dirname(__file__), 'data/populated.sql'))
         # create and empty sqlite database for testing
         db = dbconnection.createConnection('sqlite', ':memory:')
         # read the ddl script and remove the first (BEGIN TRANSACTION) and last (COMMIT) lines
         ddl = open(ddlpath, 'r').read()
-        ddl = ddl.replace('BEGIN TRANSACTION;','')
-        ddl = ddl.replace('COMMIT;','')
+        ddl = ddl.replace('BEGIN TRANSACTION;', '')
+        ddl = ddl.replace('COMMIT;', '')
         # execute each statement to build the odm2 database
         for line in ddl.split(');')[:-1]:
@@ -64,49 +64,35 @@ def build_db(self):
             print(e)
         self.reader = ReadODM2(db)
-        self.engine= db.engine
+        self.engine = db.engine
-        globals['reader'] = self.reader
-        globals['engine'] = self.engine
-        globals['db'] = db
-        # return self.write, self.engine
+        globals_vars['reader'] = self.reader
+
globals_vars['engine'] = self.engine
+        globals_vars['db'] = db
     def setup(self):
-
-        self.reader = globals['reader']
-        self.engine = globals['engine']
-        self.db = globals['db']
+        self.reader = globals_vars['reader']
+        self.engine = globals_vars['engine']
+        self.db = globals_vars['db']
-# ################################################################################
 # Sampling Features
-# ################################################################################
-
     def test_getAllSamplingFeatures(self):
-
         # get all models from the database
         res = self.engine.execute('SELECT * FROM SamplingFeatures').fetchall()
-
         # get all simulations using the api
         resapi = self.reader.getSamplingFeatures()
-
         assert len(res) == len(resapi)
     def test_getSamplingFeatureByID(self):
-
         # get all models from the database
         res = self.engine.execute('SELECT * FROM SamplingFeatures').fetchone()
-        sfid=res[0]
-
+        sfid = res[0]
         # get all simulations using the api
-        resapi = self.reader.getSamplingFeatures(ids = [sfid])
-
+        resapi = self.reader.getSamplingFeatures(ids=[sfid])
         assert resapi is not None
-# ################################################################################
 # Models
-# ################################################################################
-
     """
     TABLE Models
     ModelID INTEGER NOT NULL PRIMARY KEY,
@@ -118,32 +104,22 @@ def test_getSamplingFeatureByID(self):
     """
     def test_getAllModels(self):
-
         # get all models from the database
         res = self.engine.execute('SELECT * FROM Models').fetchall()
-
         # get all simulations using the api
         resapi = self.reader.getModels()
-
         assert len(res) == len(resapi)
     def test_getModelByCode(self):
-
         # get a converter from the database
         res = self.engine.execute('SELECT * FROM Models').fetchone()
         modelCode = res[1]
-
-
         # get the converter using the api
         resapi = self.reader.getModels(codes=[modelCode])
-
         assert resapi is not None
-# ################################################################################
 # RelatedModels
-#
################################################################################
-
     """
     TABLE RelatedModels (
     RelatedID INTEGER NOT NULL PRIMARY KEY,
@@ -157,42 +133,30 @@ def test_getModelByCode(self):
     """
     def test_getRelatedModelsByID(self):
-
         # get related models by id using the api
-        # resapi = self.reader.getRelatedModelsByID(1) #was 2
-        resapi = self.reader.getRelatedModels(id = 1)
-        # resapi = self.reader.getRelatedModels(id = 2)
-
+        resapi = self.reader.getRelatedModels(id=1)
         assert resapi is not None
         assert resapi[0].ModelCode == 'swat'
-        # assert resapi[0].RelatedModelObj.ModelCode == 'swmm'
     def test_getRelatedModelsByCode(self):
-
         # get related models by id using the api
-        resapi = self.reader.getRelatedModels(code = 'swat')
-        #resapi = self.reader.getRelatedModels(code = 'swmm')
-
+        resapi = self.reader.getRelatedModels(code='swat')
         assert resapi is not None
         assert len(resapi) > 0
         print(resapi[0].ModelCode)
         assert resapi[0].ModelCode == 'swat'
-        # assert resapi[0].RelatedModelObj.ModelCode == 'swmm'
         # test converter code that doesn't exist
-        resapi= self.reader.getRelatedModels(code = 'None')
+        resapi = self.reader.getRelatedModels(code='None')
         assert resapi is not None
         assert len(resapi) == 0
         # test invalid argument
-        resapi = self.reader.getRelatedModels(code = 234123)
+        resapi = self.reader.getRelatedModels(code=234123)
         assert not resapi
-
-# ################################################################################
 # Results
-# ################################################################################
     """
     TABLE Results (
     ResultID INTEGER NOT NULL PRIMARY KEY,
@@ -212,31 +176,23 @@ def test_getRelatedModelsByCode(self):
     ValueCount INTEGER NOT NULL
     """
     def test_getAllResults(self):
-
         # get all results from the database
         res = self.engine.execute('SELECT * FROM Results').fetchall()
         print(res)
         # get all results using the api
         resapi = self.reader.getResults()
-
         assert len(res) == len(resapi)
     def test_getResultsByID(self):
-
         # get a result from
the database
         res = self.engine.execute('SELECT * FROM Results').fetchone()
         resultid = res[1]
-
         # get the result using the api
         resapi = self.reader.getResults(ids=[resultid])
-
         assert resapi is not None
-# ################################################################################
 # Simulations
-# ################################################################################
-
     """
     TABLE Simulations (
     SimulationID INTEGER NOT NULL PRIMARY KEY,
@@ -254,60 +210,48 @@ def test_getResultsByID(self):
     """
    def test_getAllSimulations(self):
-
         # get all simulation from the database
         res = self.engine.execute('SELECT * FROM Simulations').fetchall()
-
         # get all simulations using the api
-        # resapi = self.reader.getAllSimulations()
         resapi = self.reader.getSimulations()
         assert len(res) == len(resapi)
     def test_getSimulationByName(self):
-
         # get a simulation from the database
         res = self.engine.execute('SELECT * FROM Simulations').fetchone()
         simName = res[2]
-
         # get simulation by name using the api
-        # resapi = self.reader.getSimulationByName(simulationName=simName)
-        resapi = self.reader.getSimulations(name = simName)
+        resapi = self.reader.getSimulations(name=simName)
         assert resapi is not None
     def test_getSimulationByActionID(self):
-
         # get a simulation from the database
         res = self.engine.execute('SELECT * FROM Simulations').fetchone()
         actionID = res[1]
-
         # get simulation by actionid using the api
-        # resapi = self.reader.getSimulationByActionID(actionID=actionID)
         resapi = self.reader.getSimulations(actionid=actionID)
         assert resapi is not None
     def test_getResultsBySimulationID(self):
-
         # get a simulation from the database
         res = self.engine.execute('SELECT * FROM Simulations').fetchone()
         simulation = rawSql2Alchemy(res, models.Simulations)
-
         # get the results id associated with the simulation
-        res = self.engine.execute('SELECT * from Results as r '\
-                                  'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID ' \
-                                  'inner join Actions as a on a.ActionID ==
fa.ActionID ' \
-                                  'inner join Simulations as s on s.ActionID == a.ActionID '\
-                                  'where s.SimulationID = 1').first()
+        res = self.engine.execute(
+            'SELECT * from Results as r '
+            'inner join FeatureActions as fa on fa.FeatureActionID == r.FeatureActionID '
+            'inner join Actions as a on a.ActionID == fa.ActionID '
+            'inner join Simulations as s on s.ActionID == a.ActionID '
+            'where s.SimulationID = 1'
+        ).first()
         assert len(res) > 0
         res = rawSql2Alchemy(res, models.Results)
         print(res)
         # get simulation by id using the api
         # resapi = self.reader.getResultsBySimulationID(simulation.SimulationID)
-        resapi = self.reader.getResults(simulationid = simulation.SimulationID)
+        resapi = self.reader.getResults(simulationid=simulation.SimulationID)
         assert resapi is not None
         assert len(resapi) > 0
         assert res.ResultID == resapi[0].ResultID
-
-
-