#!/usr/bin/env python
# Classes to handle nodes of the Detector_Hierarchy, Crate, Module and Channel tables.
#
# Orlando Soto, June 2014.

import sys
import math
import time

from sqlalchemy import types, event, create_engine, Column, Integer, String, TEXT, TIMESTAMP, DateTime, MetaData, Table, ForeignKey, ForeignKeyConstraint
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, class_mapper, relationship, backref
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy.types import DateTime
from datetime import datetime
from compiler.ast import flatten

time_format = "%Y-%m-%d %H:%M:%S"

############# Some code to handle SQLite DateTime type in sqlalchemy ############
def sqlite_bugfix():
    import time
    import datetime
    global time_format

    class MyEpochType(types.TypeDecorator):
        impl = types.Integer                            # Basic type.
        epoch = datetime.datetime(1970, 1, 1, 0, 0, 0)  # Date reference.

        def process_bind_param(self, value, dialect):   # Method for datetime writing.
            if dialect.name == 'sqlite':
                return time.mktime(time.strptime(str(value), time_format)) - time.mktime(self.epoch.timetuple())
            elif dialect.name == 'mysql':
                return value
            else:
                print "Dialect %s time format not handled. Comment sqlite_bugfix or modify MyEpochType class." % dialect.name
                sys.exit(1)

        def process_result_value(self, value, dialect):  # Method for datetime reading.
            if dialect.name == 'sqlite':
                return self.epoch + datetime.timedelta(seconds=value)
            elif dialect.name == 'mysql':
                return value
            else:
                print "Dialect %s time format not handled. Comment sqlite_bugfix or modify MyEpochType class." % dialect.name
                sys.exit(1)

    @event.listens_for(Table, "column_reflect")
    def setup_epoch(inspector, table, column_info):
        if isinstance(column_info['type'], types.DateTime):
            column_info['type'] = MyEpochType()
#################################################################################

sqlite_bugfix()


def make_classes(Base=None):
    global time_format
    if Base is None:
        engine = create_engine('sqlite:///tt.db', echo=False)  # Create a connector for the SQLite database.
        Base = declarative_base()                               # Get the declarative base used to read the database schemas.
        Base.metadata.reflect(engine, only=('Detector_Hierarchy', 'Crate', 'Module', 'Channel'))  # Read the database table schemas.
    Base.root_name = ''  # Field created for tree-structured tables.

    class Node(object):
        '''
        Class implementing basic traversal methods shared by all mapped tables.

        Parameters:
            name            : Default 'name' field. Used at the beginning of recursive procedures.
            id              : Default 'id' field. Used at the beginning of recursive procedures.
            parent_id       : Default 'parent_id' field. Used at the beginning of recursive procedures.
            is_complete     : Boolean flag. True: the node was found. False: only part of the node (or nothing) was found.
            quiet           : Boolean flag. Set silent mode (True: ON, False: OFF).
            wait_for_commit : Boolean flag. Used to control commits inside some methods.
            fk_name         : Foreign key name used to identify unique nodes.
            key_name        : Name of the column to search for a match.
        '''
        name = None  # Just in case nothing is there.
        id = None
        parent_id = None
        is_complete = False
        quiet = False
        wait_for_commit = False
        fk_name = None
        key_name = 'name'

        def get_node(self, session, data=None, fk=None):
            # data: list of names for tree-structured tables, or a primary key / column dict for regular tables.
            '''
            Find and return a node. If something is found, the return type is type(self);
            otherwise a plain Node is returned. Accepts tree-structured tables and
            regular tables.
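
            Illustrative calls (hypothetical values, mirroring testClasses() below;
            'session' is an open sqlalchemy session bound to the reflected schema):

                crate = Crate().get_node(session, {'name': 'Cr_Test'})    # regular table: dict of column values
                leaf = Detector_Hierarchy().get_node(session, ('DH_test_root', 'DH_Test', 'U', '51', 100))  # tree table: path of names
                if not crate.is_complete:
                    print 'crate not found'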
            '''
            node = Node()
            if self.root_name:  # Ask whether the table has a tree structure.
                for n in data:  # Recursively look for the key_name values passed in the data tuple.
                    try:
                        node = session.query(self.__class__).filter(getattr(self.__class__, self.key_name) == n, self.__class__.parent_id == node.id).one()
                    except NoResultFound:
                        if (not self.quiet):
                            print ("Warning: Node not found.\nPart found: %s" % node.name)
                        node.is_complete = False
                        break
                    except MultipleResultsFound:
                        print ("Error: Multiple results")
                    else:
                        node.is_complete = True
            elif not isinstance(data, dict):
                try:
                    node = session.query(self.__class__).filter(self.__mapper__.primary_key[0] == data).one()
                except NoResultFound:
                    if (not self.quiet):
                        print ("Warning: Node: %s not found" % node.name)
                    node.is_complete = False
                except MultipleResultsFound:
                    print ("Error: Multiple results\nYou should supply a name or primary key for node: %s" % node.name)
                else:
                    node.is_complete = True
            else:
                q = session.query(self.__class__)
                for k in data.keys():
                    q = q.filter(getattr(self.__class__, k) == data[k])
                if self.fk_name is not None:
                    q = q.filter(getattr(self.__class__, self.fk_name) == fk)
                try:
                    node = q.one()
                except NoResultFound:
                    if (not self.quiet):
                        print ("Warning: Node: %s not found" % node.name)
                    node.is_complete = False
                except MultipleResultsFound:
                    print ("Error: Multiple results")
                else:
                    node.is_complete = True
            return node

        def get_node_name(self):
            ''' Get the node name including all its ancestry. '''
            node_name = self.name
            parent = self
            while parent.parent_id:
                parent = parent.parent
                node_name = parent.name + ":" + node_name
            return node_name

        def get_root_parent(self, level=0):
            ''' Get the root parent (or an ancestor `level` steps below the root). '''
            cnt = 0
            parent = self
            while parent:
                parent = parent.parent
                cnt = cnt + 1
            cnt = cnt - 1
            if level > cnt:
                print 'Error: Number of levels below root bigger than number of ancestors.'
                sys.exit(1)
            parent = self
            for i in range(cnt - level):
                parent = parent.parent
            return parent

        def insert(self, session, data, fk=None):
            node = self.__class__()
            for k in data.keys():
                setattr(node, k, data[k])
            if fk is not None:
                setattr(node, node.fk_name, fk)
            session.add(node)
            if (not node.wait_for_commit):
                session.commit()  # Primary key added to the node object.
            return node

        def node_comp(self, data, fk=None):
            comp_dict = {}
            for k in data.keys():
                comp_dict[k] = getattr(self, k) == data[k]
            if fk is not None:
                comp_dict[self.fk_name] = getattr(self, self.fk_name) == fk
            return comp_dict
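
        # Illustrative diff check (hypothetical objects: 'cr' is a node fetched with
        # get_node() and 'data_dict' a candidate row built with mk_dict()):
        #   if cr.has_diff(data_dict):
        #       cr.show_diff(data_dict)   # prints the Database('-') vs. new data('+') report per column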
        def show_diff(self, data, fk=None, fout=None):
            comp_dict = self.node_comp(data, fk)
            stdout_bk = sys.stdout
            if fout is not None:
                sys.stdout = fout
            if not reduce(lambda x, y: x and y, comp_dict.values()):
                print "%s has differences.\nDatabase('-'), new data('+')" % self.__table__.name
                for k, v in comp_dict.items():
                    if not v:
                        print "Column '%s':" % k
                        print "\t- %s" % (getattr(self, k))
                        print "\t+ %s" % (data[k])
                sys.stdout = stdout_bk
                return True
            else:
                sys.stdout = stdout_bk
                return False

        def has_diff(self, data, fk=None, fout=None):
            comp_dict = self.node_comp(data, fk)
            if not reduce(lambda x, y: x and y, comp_dict.values()):
                return True
            else:
                return False

        def mk_dict(self, data):
            col_names = self.get_col_keys()
            return dict(zip(col_names, data))

        def get_col_keys(self):
            col_keys = map(lambda x: x.name, filter(lambda x: not x.primary_key and not x.foreign_keys, self.__table__.c._data.values()))
            return col_keys

        def get_data(self, col_keys=None):
            if col_keys is None:
                col_keys = self.get_col_keys()
            data_dict = dict(map(lambda x: x, filter(lambda x: x[0] in col_keys, self.__dict__.items())))
            return data_dict

        def clone(self):
            node = self.__class__()
            col_keys = self.get_col_keys()
            for k in col_keys:
                setattr(node, k, getattr(self, k))
            return node

        def clone_all(self, node_cnt=0):
            node = self.clone()
            node_cnt = node_cnt + 1
            for item in self.children:
                clone, node_cnt = item.clone_all(node_cnt)
                node.children.append(clone)
            return node, node_cnt

        def copy_data(self, node, col_keys=None):
            if col_keys is None:
                col_keys = self.get_col_keys()
            # print col_keys
            for k in col_keys:
                if isinstance(getattr(node, k), datetime):
                    continue  # Do not copy the modification date.
                setattr(self, k, getattr(node, k))
            return self

        def copy_data_all(self, node, col_keys=None, node_cnt=0):
            Ses = sessionmaker()
            session = Ses.object_session(self)
            node_cnt = node_cnt + 1
            if col_keys is None:
                col_keys = self.get_col_keys()
            if self.has_diff(node.get_data(col_keys)):
                self.copy_data(node, col_keys)
                node_cnt = node_cnt + 1
            else:
                print "No differences found"
            for child in node.children:
                local_child = self.__class__()
                print "looking for : '%s'" % child.get_node_name()
                local_child = local_child.get_node(session, child.get_node_name().split(':'))
                if not local_child.is_complete:
                    print "Node: '%s' not found. Creating node and children" % child.get_node_name()
                    local_child, node_cnt = child.clone_all()
                    self.children.append(local_child)
                    print local_child
                    session.commit()
                else:
                    node_cnt = local_child.copy_data_all(child, col_keys, node_cnt)
            return node_cnt

        def get_session(self):
            Ses = sessionmaker()
            session = Ses.object_session(self)
            return session

        def __repr__(self):
            try:
                k = self.__table__.c._data.keys()
            except:
                ret = 'empty object'
            else:
                data_dict = filter(lambda x: x[0].find('_sa_') < 0 and (x[0] in k), self.__dict__.items())
                if len(data_dict):
                    ret = "<" + self.__table__.name + "(" + reduce(lambda x, y: x + ", " + y, map(lambda x: str(x[0]) + "=" + str(x[1]), data_dict)) + ")>"
                else:
                    ret = 'empty object'
            return ret

    class Crate(Base, Node):
        '''
        Class used to handle rows of the Crate table in the database.
        This class needs the sqlalchemy module.
        '''
        __table__ = Base.metadata.tables['Crate']  # Could be __tablename__ = 'Crate'; 'Crate' is known by Base.metadata.
        children = relationship("Module", cascade="all", backref=backref("parent", uselist=False))
        parent = None
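
    # Illustrative traversal of the Crate -> Module -> Channel relationships
    # (hypothetical crate name; assumes a session bound to the reflected schema):
    #   crate = Crate().get_node(session, {'name': 'Cr_Test'})
    #   if crate.is_complete:
    #       for module in crate.children:      # Module rows whose crateid points at this crate
    #           for channel in module.children:
    #               print channel              # __repr__ lists the mapped columns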
    class Module(Base, Node):
        '''
        Class used to handle rows of the Module table in the database.
        This class needs the sqlalchemy module.
        '''
        __table__ = Base.metadata.tables['Module']
        fk_name = 'crateid'
        key_name = 'slot'
        ForeignKeyConstraint([__table__.c.crateid], ['Crate.crateid'])
        children = relationship("Channel", cascade="all", backref=backref("parent", uselist=False))

    class Channel(Base, Node):
        '''
        Class used to handle rows of the Channel table in the database.
        This class needs the sqlalchemy module.
        '''
        __table__ = Base.metadata.tables['Channel']
        fk_name = 'moduleid'
        key_name = 'channel'
        detector_hierarchy = relationship('Detector_Hierarchy', backref=backref("channel", uselist=False, lazy='joined'))
        ForeignKeyConstraint([__table__.c.moduleid], ['Module.moduleid'])
        children = []

    '''
    class Voltages(Base, Node):
        """
        Class used to handle rows of the Voltages table in the database.
        This class needs the sqlalchemy module.
        """
        __table__ = Base.metadata.tables['Voltages']
        level = 0
        #col_names = ('name','channel','system','col_name','enable')  # Column names excluding primary and foreign keys.
        key_name = 'Vop_avg'
        detector_hierarchy = relationship('Detector_Hierarchy', backref=backref("voltages", uselist=False, lazy='joined'))
        parent = None
        children = []
    '''

    class Detector_Hierarchy(Base, Node):
        '''
        Class used to handle rows of the Detector_Hierarchy table in the database.
        This class needs the sqlalchemy module.
        '''
        __table__ = Base.metadata.tables['Detector_Hierarchy']
        level = 0
        root_name = 'BCAL'  # The root name must be defined for tree-structured tables only.
        tree_type = ('Detector', 'Discriminator', 'Side', 'Wedge', 'Number')  # The tree level types must be defined.
        fk_name = 'chanid'
        ForeignKeyConstraint([__table__.c.parent_id], ['Detector_Hierarchy.id'])
        ForeignKeyConstraint([__table__.c.chanid], ['Channel.chanid'])
        # ForeignKeyConstraint([__table__.c.volt_id], ['Voltages.volt_id'])
        children = relationship("Detector_Hierarchy", cascade="all", backref=backref("parent", remote_side=[__table__.c.id]))
        global time_format

        def fill(self, session, data, fk):  # 'node' below refers to the starting-point node (self).
            starting_name = self.name
            start_index = data.index(starting_name) + 1 if starting_name else 0
            node = self
            for i in range(start_index, len(data) - 1):
                node_dict = dict(zip(node.col_names, (data[i], node.tree_type[i], node.id, datetime.now().strftime(time_format))))
                node = node.insert(session, node_dict)  # The commit inside insert() fills the id column.
            i = len(data) - 1
            node_dict = dict(zip(node.col_names, (data[i], node.tree_type[i], node.id, datetime.now().strftime(time_format))))
            node.insert(session, node_dict, fk)
            return node

        def get_leaves(self):
            leaves = []
            for item in self.children:
                if not item.children:
                    leaves.append(item)
                else:
                    le = item.get_leaves()
                    leaves.append(le)
            return flatten(leaves)

    return Detector_Hierarchy, Crate, Module, Channel
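

# Minimal usage sketch (illustrative only): reflect the default 'tt.db' schema,
# look a crate up by name and walk its modules. The crate name 'Cr_Test' is a
# hypothetical value matching the row created by testClasses() below.
def example_usage():
    engine = create_engine('sqlite:///tt.db', echo=False)
    Detector_Hierarchy, Crate, Module, Channel = make_classes()
    session = sessionmaker(bind=engine)()
    crate = Crate().get_node(session, {'name': 'Cr_Test'})  # Regular-table lookup by column dict.
    if crate.is_complete:
        for module in crate.children:  # Module rows whose crateid points at this crate.
            print module
    session.close()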

def testClasses():
    ''' Exercise the reflected classes: insert the test rows if they are missing, delete them if they are present. '''
    engine = create_engine('sqlite:///tt.db', echo=False)  # Create a connector for the SQLite database.
    Detector_Hierarchy, Crate, Module, Channel = make_classes()
    Session = sessionmaker(bind=engine)  # Bind to the database.
    session = Session()                  # Connect to the database (will connect on the first query, rollback or commit).

    cr = Crate()
    node_data = ('Cr_Test', '51', '1729', 'Here', 'N/A', 'test', '1', '0', None)
    data_dict = cr.mk_dict(node_data)
    cr = cr.get_node(session, data_dict)
    if not cr.is_complete:
        print 'New crate "Cr_Test" created'
        cr = Crate()
        cr = cr.insert(session, data_dict)
    else:
        print 'Crate "Cr_Test" removed'
        session.delete(cr)
    session.commit()

    mo = Module()
    node_data = ('Mo_Test', 'Everything', 'N/A')
    data_dict = mo.mk_dict(node_data)
    mo = mo.get_node(session, data_dict, cr.crateid)
    if not mo.is_complete:
        print 'New module "Mo_Test" created'
        mo = Module()
        mo = mo.insert(session, data_dict, cr.crateid)
    else:
        print 'Module "Mo_Test" removed'
        session.delete(mo)
    session.commit()

    ch = Channel()
    node_data = ('Ch_Test', '1', 'Earth', 'N/A', '1')
    data_dict = ch.mk_dict(node_data)
    ch = ch.get_node(session, data_dict, mo.moduleid)
    if not ch.is_complete:
        ch = Channel()
        print 'New channel "Ch_Test" created'
        ch = ch.insert(session, data_dict, mo.moduleid)
    else:
        print 'Channel "Ch_Test" removed'
        session.delete(ch)
    session.commit()

    dh = Detector_Hierarchy()
    data = 'DH_test_root', 'DH_Test', 'U', '51', 100
    dh = dh.get_node(session, data)
    if dh.is_complete:
        print 'Branch "DH_Test" starting from DH_test_root removed'
        print map(lambda x: str(x), data)
        for i in range(0, len(data)):
            print "Erasing: %s" % str(data[:len(data) - i])
            dh = dh.get_node(session, data[:len(data) - i])
            session.delete(dh)
    else:
        dh = Detector_Hierarchy()
        dh.root_name = 'DH_test_root'
        dh.fill(session, data, ch.chanid)
        print 'New branch "DH_Test" starting from DH_test_root created'
    session.commit()
    session.close()


if __name__ == "__main__":  # Do something if it gets executed alone.
    testClasses()