Skip to content

Commit 7560412

Browse files
author
stephanie
committed
build action from scratch
1 parent ea3325b commit 7560412

2 files changed

Lines changed: 69 additions & 16 deletions

File tree

odmtools/odmservices/edit_service.py

Lines changed: 22 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -566,7 +566,7 @@ def getResult(self, var, meth, proc, action, action_by):
566566
values = self.memDB.getDataValuesDF()
567567
# copy old
568568
result = self.memDB.series_service.get_series(str(values["resultid"][0]))
569-
569+
newaction = Actions()
570570

571571

572572
# change var, meth proc, in df #intend ts, agg sta
@@ -579,24 +579,33 @@ def getResult(self, var, meth, proc, action, action_by):
579579
result.ProcessingLevelObj = proc
580580

581581
if meth:
582-
action.MethodID = meth.MethodID
583-
action.MethodObj = meth.MethodObj
582+
newaction.MethodID = meth.MethodID
583+
newaction.MethodObj = meth.MethodObj
584584

585585
#if result does not exist
586586
if not self.memDB.series_service.resultExists(result):
587587
try:
588588

589589
#create Action
590-
action.ActionID = None
591-
action.ActionTypeCV = "Derivation"
592-
self.memDB.series_service.read._session.expunge(action.MethodObj.OrganizationObj)
593-
self.memDB.series_service.read._session.expunge(action.MethodObj)
594-
action = self.memDB.series_service.create.createAction(action) # it times out. find out why
595-
print action
590+
# self.memDB.series_service.read._session.expunge(action.MethodObj.OrganizationObj)
591+
# self.memDB.series_service.read._session.expunge(action.MethodObj)
592+
593+
newaction.ActionDescription = action.ActionDescription
594+
newaction.ActionFileLink = action.ActionFileLink
595+
newaction.BeginDateTime = action.BeginDateTime
596+
newaction.BeginDateTimeUTCOffset = action.BeginDateTimeUTCOffset
597+
newaction.EndDateTime = action.EndDateTime
598+
newaction.EndDateTimeUTCOffset = action.EndDateTimeUTCOffset
599+
newaction.MethodID = action.MethodID
600+
newaction.ActionTypeCV = "Derivation"
601+
602+
print newaction
603+
newaction = self.memDB.series_service.create.createAction(newaction) # it times out. find out why
604+
print newaction
596605

597606

598607
# create Actionby done
599-
action_by.ActionID = action.ActionID
608+
action_by.ActionID = newaction.ActionID
600609
action_by= self.memDB.series_service.create.createActionby(action_by)
601610
print action_by
602611

@@ -607,8 +616,8 @@ def getResult(self, var, meth, proc, action, action_by):
607616

608617
feature_action = FeatureActions()
609618
feature_action.SamplingFeatureID = sampling_feature.SamplingFeatureID
610-
feature_action.ActionID = action.ActionID
611-
feature_action.ActionObj = action
619+
feature_action.ActionID = newaction.ActionID
620+
feature_action.ActionObj = newaction
612621
feature_action.SamplingFeatureObj = sampling_feature
613622
feature_action = self.memDB.series_service.create.createFeatureAction(feature_action)
614623
print feature_action
@@ -631,6 +640,7 @@ def getResult(self, var, meth, proc, action, action_by):
631640
result = self.memDB.series_service.create.createResult(result)
632641
print result
633642
except Exception as ex:
643+
self.memDB.series_service._session.rollback()
634644
print ex
635645
return self.updateResult(result)
636646

odmtools/odmservices/series_service.py

Lines changed: 47 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from odm2api.ODM2.services import ReadODM2, UpdateODM2, DeleteODM2, CreateODM2
44
from odm2api import serviceBase
55
from odm2api.ODM2.models import *
6-
from odmtools.odmservices.to_sql_newrows import get_insert, get_delete, get_update
6+
#from odmtools.odmservices.to_sql_newrows import get_insert, get_delete, get_update
77
import datetime
88
from odmtools.common.logger import LoggerTool
99
import pandas as pd
@@ -646,14 +646,14 @@ def _get_df_query(self, values):
646646
def upsert_values(self, values):
647647
setSchema(self._session_factory.engine)
648648
query = self._get_df_query(values)
649-
newvals= get_insert(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
649+
newvals= get_insert(df= values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
650650
if not newvals.empty:
651651
self.insert_values(newvals)
652-
delvals = get_delete(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
652+
delvals = get_delete(df= values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
653653
if not delvals.empty:
654654
self.delete_dvs(delvals["valuedatetime"].tolist())
655655

656-
upvals = get_update(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
656+
upvals = get_update(df= values, query= query, dup_cols = ["valuedatetime", "resultid"], engine= self._session_factory.engine)
657657
if not upvals.empty:
658658
self.update_values(upvals)
659659

@@ -981,3 +981,46 @@ def get_values_by_series(self, series_id):
981981
q = q.order_by(TimeSeriesResultValues.ValueDateTime)
982982

983983
return q.all()
984+
985+
986+
def get_delete(df, engine, query, dup_cols=None):
    """Return the rows of *df* whose valuedatetime matches a database-only row.

    Merges *df* against the rows returned by *query* and keeps the rows that
    exist only on the database side (``_merge == 'right_only'``); *df* is then
    filtered down to the valuedatetimes of those rows.

    Required:
        df : dataframe of candidate values; must contain a 'valuedatetime'
            column plus every column named in dup_cols
        engine : SQLAlchemy engine (or DBAPI connection) usable by pd.read_sql
        query : SQL SELECT producing the comparable database rows
        dup_cols : list/tuple of column names identifying a duplicate row
    Returns:
        Subset of *df* selected for deletion.
    """
    # None default instead of a mutable [] (shared-mutable-default pitfall).
    dup_cols = [] if dup_cols is None else list(dup_cols)
    # Deduplicate on a copy rather than mutating the caller's frame in place.
    df = df.drop_duplicates(dup_cols, keep='last')
    merged = pd.merge(df, pd.read_sql(query, engine), how='right',
                      on=dup_cols, indicator=True)
    db_only = merged[merged['_merge'] == 'right_only']
    db_only = db_only.drop(['_merge'], axis=1)
    # NOTE(review): rows marked right_only are absent from df on dup_cols, so
    # this filter only picks up df rows that share a valuedatetime while
    # differing on another key column (e.g. resultid) — confirm this is the
    # intended delete set rather than db_only itself.
    return df[df['valuedatetime'].isin(db_only['valuedatetime'])]
993+
994+
def get_update(df, engine, query, dup_cols=None):
    """Return the rows of *df* that exist in the database with a different datavalue.

    Inner-merges *df* with the rows returned by *query* on dup_cols and keeps
    the df rows whose 'datavalue' disagrees with the stored one.

    Required:
        df : dataframe of candidate values; must contain 'valuedatetime',
            'datavalue', and every column named in dup_cols
        engine : SQLAlchemy engine (or DBAPI connection) usable by pd.read_sql
        query : SQL SELECT producing the comparable database rows; assumed to
            also carry a 'datavalue' column so the merge yields the
            datavalue_x / datavalue_y pair compared below
        dup_cols : list/tuple of column names identifying a duplicate row
    Returns:
        Subset of *df* whose stored counterpart needs updating.
    """
    # None default instead of a mutable [] (shared-mutable-default pitfall).
    dup_cols = [] if dup_cols is None else list(dup_cols)
    # Deduplicate on a copy rather than mutating the caller's frame in place.
    df = df.drop_duplicates(dup_cols, keep='last')
    merged = pd.merge(df, pd.read_sql(query, engine), how='inner',
                      on=dup_cols, indicator=True)
    merged = merged.drop(['_merge'], axis=1)
    # datavalue_x comes from df, datavalue_y from the database.
    changed = merged[merged['datavalue_x'] != merged['datavalue_y']]
    return df[df['valuedatetime'].isin(changed['valuedatetime'])]
1002+
1003+
def get_insert(df, engine, query, dup_cols=None):
    """Return the rows of *df* that do not already exist in the database.

    Left-merges *df* with the rows returned by *query* on dup_cols and keeps
    the df rows found only on the dataframe side (``_merge == 'left_only'``).

    Required:
        df : dataframe to remove duplicate rows from; must contain a
            'valuedatetime' column plus every column named in dup_cols
        engine : SQLAlchemy engine (or DBAPI connection) usable by pd.read_sql
        query : SQL SELECT producing the existing database rows to compare
            against (e.g. restricted with a BETWEEN min/max filter on a
            continuous column to keep the comparison small)
        dup_cols : list/tuple of column names to check for duplicate row values
    Returns:
        Subset of *df* that is new relative to the database table.
    """
    # None default instead of a mutable [] (shared-mutable-default pitfall).
    dup_cols = [] if dup_cols is None else list(dup_cols)
    # Deduplicate on a copy rather than mutating the caller's frame in place.
    df = df.drop_duplicates(dup_cols, keep='last')
    merged = pd.merge(df, pd.read_sql(query, engine), how='left',
                      on=dup_cols, indicator=True)
    df_only = merged[merged['_merge'] == 'left_only']
    df_only = df_only.drop(['_merge'], axis=1)
    return df[df['valuedatetime'].isin(df_only['valuedatetime'])]

0 commit comments

Comments
 (0)