diff --git a/HashMap.py b/HashMap.py index cdb033c..3441bd5 100644 --- a/HashMap.py +++ b/HashMap.py @@ -5,38 +5,38 @@ # # Mapping configuration # { -# '[dst key 1]': { # Key name in the result +# '[dst key 1]': { # Key name in the result # -# 'order': [int], # Processing order between destinations keys +# 'order': [int], # Processing order between destinations keys # -# # Source values -# 'other_key': [key], # Other key of the destination to use as source of values -# 'key' : '[src key]', # Key of source hash to get source values -# 'keys' : ['[sk1]', '[sk2]', ...], # List of source hash's keys to get source values +# # Source values +# 'other_key': [key], # Other key of the destination to use as source of values +# 'key' : '[src key]', # Key of source hash to get source values +# 'keys' : ['[sk1]', '[sk2]', ...], # List of source hash's keys to get source values # -# # Clean / convert values -# 'cleanRegex': '[regex]', # Regex that be use to remove unwanted characters. Ex : [^0-9+] -# 'convert': [function], # Function to use to convert value : Original value will be passed -# # as argument and the value retrieve will replace source value in -# # the result -# # Ex : -# # lambda x: x.strip() -# # lambda x: "myformat : %s" % x -# # Deduplicate / check values -# 'deduplicate': [bool], # If True, sources values will be depluplicated -# 'check': [function], # Function to use to check source value : Source value will be passed -# # as argument and if function return True, the value will be preserved -# # Ex : -# # lambda x: x in my_global_hash -# # Join values -# 'join': '[glue]', # If present, sources values will be join using the "glue" +# # Clean / convert values +# 'cleanRegex': '[regex]', # Regex that be use to remove unwanted characters. Ex : [^0-9+] +# 'convert': [function], # Function to use to convert value : Original value will be passed +# # as argument and the value retrieve will replace source value in +# # the result +# # Ex : +# # lambda x: x.strip() +# # lambda x: "myformat : %s" % x +# # Deduplicate / check values +# 'deduplicate': [bool], # If True, sources values will be depluplicated +# 'check': [function], # Function to use to check source value : Source value will be passed +# # as argument and if function return True, the value will be preserved +# # Ex : +# # lambda x: x in my_global_hash +# # Join values +# 'join': '[glue]', # If present, sources values will be join using the "glue" # -# # Alternative mapping -# 'or': { [map configuration] } # If this mapping case does not retreive any value, try to get value(s) -# # with this other mapping configuration +# # Alternative mapping +# 'or': { [map configuration] } # If this mapping case does not retreive any value, try to get value(s) +# # with this other mapping configuration # }, # '[dst key 2]': { -# [...] +# [...] 
# } # } # @@ -50,125 +50,125 @@ import logging, re def clean_value(value): - if isinstance(value, int): - value=str(value) - return value.encode('utf8') + if isinstance(value, int): + value=str(value) + return value.encode('utf8') def map(map_keys,src,dst={}): - def get_values(dst_key,src,m): - # Extract sources values - values=[] - if 'other_key' in m: - if m['other_key'] in dst: - values=dst[m['other_key']] - if 'key' in m: - if m['key'] in src and src[m['key']]!='': - values.append(clean_value(src[m['key']])) + def get_values(dst_key,src,m): + # Extract sources values + values=[] + if 'other_key' in m: + if m['other_key'] in dst: + values=dst[m['other_key']] + if 'key' in m: + if m['key'] in src and src[m['key']]!='': + values.append(clean_value(src[m['key']])) - if 'keys' in m: - for key in m['keys']: - if key in src and src[key]!='': - values.append(clean_value(src[key])) + if 'keys' in m: + for key in m['keys']: + if key in src and src[key]!='': + values.append(clean_value(src[key])) - # Clean and convert values - if 'cleanRegex' in m and len(values)>0: - new_values=[] - for v in values: - nv=re.sub(m['cleanRegex'],'',v) - if nv!='': - new_values.append(nv) - values=new_values + # Clean and convert values + if 'cleanRegex' in m and len(values)>0: + new_values=[] + for v in values: + nv=re.sub(m['cleanRegex'],'',v) + if nv!='': + new_values.append(nv) + values=new_values - if 'convert' in m and len(values)>0: - new_values=[] - for v in values: - nv=m['convert'](v) - if nv!='': - new_values.append(nv) - values=new_values + if 'convert' in m and len(values)>0: + new_values=[] + for v in values: + nv=m['convert'](v) + if nv!='': + new_values.append(nv) + values=new_values - # Deduplicate values - if m.get('deduplicate') and len(values)>1: - new_values=[] - for v in values: - if v not in new_values: - new_values.append(v) - values=new_values - - # Check values - if 'check' in m and len(values)>0: - new_values=[] - for v in values: - if m['check'](v): - new_values.append(v) - else: - logging.debug('Invalid value %s for key %s' % (v,dst_key)) - if dst_key not in invalid_values: - invalid_values[dst_key]=[] - if v not in invalid_values[dst_key]: - invalid_values[dst_key].append(v) - values=new_values + # Deduplicate values + if m.get('deduplicate') and len(values)>1: + new_values=[] + for v in values: + if v not in new_values: + new_values.append(v) + values=new_values - # Join values - if 'join' in m and len(values)>1: - values=[m['join'].join(values)] + # Check values + if 'check' in m and len(values)>0: + new_values=[] + for v in values: + if m['check'](v): + new_values.append(v) + else: + logging.debug('Invalid value %s for key %s' % (v,dst_key)) + if dst_key not in invalid_values: + invalid_values[dst_key]=[] + if v not in invalid_values[dst_key]: + invalid_values[dst_key].append(v) + values=new_values - # Manage alternative mapping case - if len(values)==0 and 'or' in m: - values=get_values(dst_key,src,m['or']) + # Join values + if 'join' in m and len(values)>1: + values=[m['join'].join(values)] + + # Manage alternative mapping case + if len(values)==0 and 'or' in m: + values=get_values(dst_key,src,m['or']) - return values + return values - for dst_key in sorted(map_keys.keys(), key=lambda x: map_keys[x]['order']): - values=get_values(dst_key,src,map_keys[dst_key]) + for dst_key in sorted(map_keys.keys(), key=lambda x: map_keys[x]['order']): + values=get_values(dst_key,src,map_keys[dst_key]) - if len(values)==0: - if 'required' in map_keys[dst_key] and map_keys[dst_key]['required']: - 
logging.debug('Destination key %s could not be filled from source but is required' % dst_key) - return False - continue + if len(values)==0: + if 'required' in map_keys[dst_key] and map_keys[dst_key]['required']: + logging.debug('Destination key %s could not be filled from source but is required' % dst_key) + return False + continue - dst[dst_key]=values - return dst + dst[dst_key]=values + return dst if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) + logging.basicConfig(level=logging.DEBUG) - src={ - 'uid': 'hmartin', - 'firstname': 'Martin', - 'lastname': 'Martin', - 'disp_name': 'Henri Martin', - 'line_1': '3 rue de Paris', - 'line_2': 'Pour Pierre', - 'zip_text': '92 120', - 'city_text': 'Montrouge', - 'line_city': '92120 Montrouge', - 'tel1': '01 00 00 00 00', - 'tel2': '09 00 00 00 00', - 'mobile': '06 00 00 00 00', - 'fax': '01 00 00 00 00', - 'email': 'H.MARTIN@GMAIL.COM', - } + src={ + 'uid': 'hmartin', + 'firstname': 'Martin', + 'lastname': 'Martin', + 'disp_name': 'Henri Martin', + 'line_1': '3 rue de Paris', + 'line_2': 'Pour Pierre', + 'zip_text': '92 120', + 'city_text': 'Montrouge', + 'line_city': '92120 Montrouge', + 'tel1': '01 00 00 00 00', + 'tel2': '09 00 00 00 00', + 'mobile': '06 00 00 00 00', + 'fax': '01 00 00 00 00', + 'email': 'H.MARTIN@GMAIL.COM', + } - map_c={ - 'uid': {'order': 0, 'key': 'uid','required': True}, - 'givenName': {'order': 1, 'key': 'firstname'}, - 'sn': {'order': 2, 'key': 'lastname'}, - 'cn': {'order': 3, 'key': 'disp_name','required': True, 'or': {'attrs': ['firstname','lastname'],'join': ' '}}, - 'displayName': {'order': 4, 'other_key': 'displayName'}, - 'street': {'order': 5, 'join': ' / ', 'keys': ['ligne_1','ligne_2']}, - 'postalCode': {'order': 6, 'key': 'zip_text', 'cleanRegex': '[^0-9]'}, - 'l': {'order': 7, 'key': 'city_text'}, - 'postalAddress': {'order': 8, 'join': '$', 'keys': ['ligne_1','ligne_2','ligne_city']}, - 'telephoneNumber': {'order': 9, 'keys': ['tel1','tel2'], 'cleanRegex': '[^0-9+]', 'deduplicate': True}, - 'mobile': {'order': 10,'key': 'mobile'}, - 'facsimileTelephoneNumber': {'order': 11,'key': 'fax'}, - 'mail': {'order': 12,'key': 'email', 'convert': lambda x: x.lower().strip()} - } - - logging.debug('[TEST] Map src=%s / config= %s' % (src,map_c)) - logging.debug('[TEST] Result : %s' % map(map_c,src)) + map_c={ + 'uid': {'order': 0, 'key': 'uid','required': True}, + 'givenName': {'order': 1, 'key': 'firstname'}, + 'sn': {'order': 2, 'key': 'lastname'}, + 'cn': {'order': 3, 'key': 'disp_name','required': True, 'or': {'attrs': ['firstname','lastname'],'join': ' '}}, + 'displayName': {'order': 4, 'other_key': 'displayName'}, + 'street': {'order': 5, 'join': ' / ', 'keys': ['ligne_1','ligne_2']}, + 'postalCode': {'order': 6, 'key': 'zip_text', 'cleanRegex': '[^0-9]'}, + 'l': {'order': 7, 'key': 'city_text'}, + 'postalAddress': {'order': 8, 'join': '$', 'keys': ['ligne_1','ligne_2','ligne_city']}, + 'telephoneNumber': {'order': 9, 'keys': ['tel1','tel2'], 'cleanRegex': '[^0-9+]', 'deduplicate': True}, + 'mobile': {'order': 10,'key': 'mobile'}, + 'facsimileTelephoneNumber': {'order': 11,'key': 'fax'}, + 'mail': {'order': 12,'key': 'email', 'convert': lambda x: x.lower().strip()} + } + + logging.debug('[TEST] Map src=%s / config= %s' % (src,map_c)) + logging.debug('[TEST] Result : %s' % map(map_c,src)) diff --git a/MyDB.py b/MyDB.py index 09a7ddf..3d024e3 100644 --- a/MyDB.py +++ b/MyDB.py @@ -6,54 +6,54 @@ import sys class MyDB(object): - host = "" - user = "" - pwd = "" - db = "" + host = "" + user = "" 
+ pwd = "" + db = "" - con = 0 + con = 0 - def __init__(self,host,user,pwd,db): - self.host = host - self.user = user - self.pwd = pwd - self.db = db + def __init__(self,host,user,pwd,db): + self.host = host + self.user = user + self.pwd = pwd + self.db = db - def connect(self): - if self.con == 0: - try: - con = MySQLdb.connect(self.host,self.user,self.pwd,self.db) - self.con = con - except Exception, e: - logging.fatal(e) - sys.exit(1) + def connect(self): + if self.con == 0: + try: + con = MySQLdb.connect(self.host,self.user,self.pwd,self.db) + self.con = con + except Exception, e: + logging.fatal(e) + sys.exit(1) - def doSQL(self,sql): - cursor = self.con.cursor() - try: - cursor.execute(sql) - self.con.commit() - return True - except Exception, e: - logging.error('Erreur durant la requete sql %s : %s' % (sql,e)) - self.con.rollback() - return False + def doSQL(self,sql): + cursor = self.con.cursor() + try: + cursor.execute(sql) + self.con.commit() + return True + except Exception, e: + logging.error('Erreur durant la requete sql %s : %s' % (sql,e)) + self.con.rollback() + return False - def doSelect(self,sql): - cursor = self.con.cursor() - try: - cursor.execute(sql) - results = cursor.fetchall() - return results - ret=[] - t=0 - for row in results: - c=0 - for field in row: - ret[t][c]=field - c=c+1 - t=t+1 - return ret - except Exception, e: - logging.error('Erreur durant la requete sql %s : %s' % (sql,e)) - return False + def doSelect(self,sql): + cursor = self.con.cursor() + try: + cursor.execute(sql) + results = cursor.fetchall() + return results + ret=[] + t=0 + for row in results: + c=0 + for field in row: + ret[t][c]=field + c=c+1 + t=t+1 + return ret + except Exception, e: + logging.error('Erreur durant la requete sql %s : %s' % (sql,e)) + return False diff --git a/PgDB.py b/PgDB.py index 7136fe9..d3c4b1e 100644 --- a/PgDB.py +++ b/PgDB.py @@ -9,21 +9,21 @@ import datetime class PgDB(object): - host = "" - user = "" - pwd = "" - db = "" + host = "" + user = "" + pwd = "" + db = "" - con = 0 + con = 0 - date_format = '%Y-%m-%d' - datetime_format = '%Y-%m-%d %H:%M:%S' + date_format = '%Y-%m-%d' + datetime_format = '%Y-%m-%d %H:%M:%S' - def __init__(self,host,user,pwd,db): - self.host = host - self.user = user - self.pwd = pwd - self.db = db + def __init__(self,host,user,pwd,db): + self.host = host + self.user = user + self.pwd = pwd + self.db = db def connect(self): if self.con == 0: @@ -43,32 +43,32 @@ class PgDB(object): sys.exit(1) return True - def close(self): - if self.con: - self.con.close() + def close(self): + if self.con: + self.con.close() - def setEncoding(self,enc): - if self.con: - try: - self.con.set_client_encoding(enc) - return True - except Exception: - logging.error('An error occured setting Postgresql database connection encoding to "%s"', enc, exc_info=1) - return False + def setEncoding(self,enc): + if self.con: + try: + self.con.set_client_encoding(enc) + return True + except Exception: + logging.error('An error occured setting Postgresql database connection encoding to "%s"', enc, exc_info=1) + return False - def doSQL(self,sql,params=None): - cursor = self.con.cursor() - try: - if params is None: - cursor.execute(sql) - else: - cursor.execute(sql,params) - self.con.commit() - return True - except Exception: - logging.error(u'Error during SQL request "%s"', sql.decode('utf-8', 'ignore'), exc_info=1) - self.con.rollback() - return False + def doSQL(self,sql,params=None): + cursor = self.con.cursor() + try: + if params is None: + cursor.execute(sql) + 
else: + cursor.execute(sql,params) + self.con.commit() + return True + except Exception: + logging.error(u'Error during SQL request "%s"', sql.decode('utf-8', 'ignore'), exc_info=1) + self.con.rollback() + return False def doSelect(self,sql,params): cursor = self.con.cursor() @@ -83,109 +83,109 @@ class PgDB(object): logging.error(u'Error during SQL request "%s"', sql.decode('utf-8', 'ignore'), exc_info=1) return False - # - # SQL helpers - # - def _quote_value(self, value): - if isinstance(value, int) or isinstance(value, float): - return unicode(value) + # + # SQL helpers + # + def _quote_value(self, value): + if isinstance(value, int) or isinstance(value, float): + return unicode(value) - if isinstance(value, str): - value = unicode(value) - elif isinstance(value, datetime.datetime): - value = unicode(self._format_datetime(value)) - elif isinstance(value, datetime.date): - value = unicode(self._format_date(value)) + if isinstance(value, str): + value = unicode(value) + elif isinstance(value, datetime.datetime): + value = unicode(self._format_datetime(value)) + elif isinstance(value, datetime.date): + value = unicode(self._format_date(value)) - return u"'%s'" % value.replace(u"'",u"''") + return u"'%s'" % value.replace(u"'",u"''") - def _format_where_clauses(self, where_clauses, where_op=u'AND'): - if isinstance(where_clauses, str): - return where_clauses - elif isinstance(where_clauses, list): - return (u" %s " % where_op).join(where_clauses) - elif isinstance(where_clauses, dict): - return (u" %s " % where_op).join(map(lambda x: "%s=%s" % (x, self._quote_value(where_clauses[x])), where_clauses)) - logging.error('Unsupported where clauses type %s', type(where_clauses)) - return False + def _format_where_clauses(self, where_clauses, where_op=u'AND'): + if isinstance(where_clauses, str): + return where_clauses + elif isinstance(where_clauses, list): + return (u" %s " % where_op).join(where_clauses) + elif isinstance(where_clauses, dict): + return (u" %s " % where_op).join(map(lambda x: "%s=%s" % (x, self._quote_value(where_clauses[x])), where_clauses)) + logging.error('Unsupported where clauses type %s', type(where_clauses)) + return False - def _format_datetime(self, datetime): - return datetime.strftime(self.datetime_format) + def _format_datetime(self, datetime): + return datetime.strftime(self.datetime_format) - def _format_date(self, date): - return date.strftime(self.date_format) + def _format_date(self, date): + return date.strftime(self.date_format) - def time2datetime(self, time): - return self._format_datetime(datetime.fromtimestamp(int(time))) + def time2datetime(self, time): + return self._format_datetime(datetime.fromtimestamp(int(time))) - def time2date(self, time): - return self._format_date(datetime.fromtimestamp(int(time))) + def time2date(self, time): + return self._format_date(datetime.fromtimestamp(int(time))) - def insert(self, table, values, just_try=False): - sql=u"INSERT INTO %s (%s) VALUES (%s)" % (table, u', '.join(values.keys()), u", ".join(map(lambda x: self._quote_value(values[x]), values))) + def insert(self, table, values, just_try=False): + sql=u"INSERT INTO %s (%s) VALUES (%s)" % (table, u', '.join(values.keys()), u", ".join(map(lambda x: self._quote_value(values[x]), values))) - if just_try: - logging.debug(u"Just-try mode : execute INSERT query : %s", sql) - return True + if just_try: + logging.debug(u"Just-try mode : execute INSERT query : %s", sql) + return True - logging.debug(sql) - if not self.doSQL(sql): - logging.error(u"Fail to execute INSERT 
query (SQL : %s)" % sql) - return False - return True + logging.debug(sql) + if not self.doSQL(sql): + logging.error(u"Fail to execute INSERT query (SQL : %s)" % sql) + return False + return True - def update(self, table, values, where_clauses, where_op=u'AND', just_try=False): - where=self._format_where_clauses(where_clauses, where_op=where_op) - if not where: - return False + def update(self, table, values, where_clauses, where_op=u'AND', just_try=False): + where=self._format_where_clauses(where_clauses, where_op=where_op) + if not where: + return False - sql=u"UPDATE %s SET %s WHERE %s" % (table, u", ".join(map(lambda x: "%s=%s" % (x, self._quote_value(values[x])), values)), where) + sql=u"UPDATE %s SET %s WHERE %s" % (table, u", ".join(map(lambda x: "%s=%s" % (x, self._quote_value(values[x])), values)), where) - if just_try: - logging.debug(u"Just-try mode : execute UPDATE query : %s", sql) - return True + if just_try: + logging.debug(u"Just-try mode : execute UPDATE query : %s", sql) + return True - logging.debug(sql) - if not self.doSQL(sql): - logging.error(u"Fail to execute UPDATE query (SQL : %s)", sql) - return False - return True + logging.debug(sql) + if not self.doSQL(sql): + logging.error(u"Fail to execute UPDATE query (SQL : %s)", sql) + return False + return True - def delete(self, table, where_clauses, where_op=u'AND', just_try=False): - where=self._format_where_clauses(where_clauses, where_op=where_op) - if not where: - return False + def delete(self, table, where_clauses, where_op=u'AND', just_try=False): + where=self._format_where_clauses(where_clauses, where_op=where_op) + if not where: + return False - sql=u"DELETE FROM %s WHERE %s" % (table, where) + sql=u"DELETE FROM %s WHERE %s" % (table, where) - if just_try: - logging.debug(u"Just-try mode : execute DELETE query : %s", sql) - return True + if just_try: + logging.debug(u"Just-try mode : execute DELETE query : %s", sql) + return True - logging.debug(sql) - if not self.doSQL(sql): - logging.error(u"Fail to execute DELETE query (SQL : %s)", sql) - return False - return True + logging.debug(sql) + if not self.doSQL(sql): + logging.error(u"Fail to execute DELETE query (SQL : %s)", sql) + return False + return True - def select(self, table, where_clauses=None, fields=None, where_op=u'AND', order_by=None, just_try=False): - sql = u"SELECT " - if fields is None: - sql += "*" - elif isinstance(fields, str) or isinstance(fields, unicode): - sql += fields - else: - sql += u", ".join(fields) + def select(self, table, where_clauses=None, fields=None, where_op=u'AND', order_by=None, just_try=False): + sql = u"SELECT " + if fields is None: + sql += "*" + elif isinstance(fields, str) or isinstance(fields, unicode): + sql += fields + else: + sql += u", ".join(fields) - sql += u" FROM " + table - if where_clauses: - where=self._format_where_clauses(where_clauses, where_op=where_op) - if not where: - return False + sql += u" FROM " + table + if where_clauses: + where=self._format_where_clauses(where_clauses, where_op=where_op) + if not where: + return False - sql += u" WHERE " + where + sql += u" WHERE " + where - if order_by: - sql += u"ORDER %s" % order_by + if order_by: + sql += u"ORDER %s" % order_by - return self.doSelect(sql) + return self.doSelect(sql) diff --git a/opening_hours.py b/opening_hours.py index cd50341..06bd879 100644 --- a/opening_hours.py +++ b/opening_hours.py @@ -2,206 +2,206 @@ import datetime, re, time, logging week_days=['lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi', 'dimanche'] def 
easter_date(year): - a=year//100 - b=year%100 - c=(3*(a+25))//4 - d=(3*(a+25))%4 - e=(8*(a+11))//25 - f=(5*a+b)%19 - g=(19*f+c-e)%30 - h=(f+11*g)//319 - j=(60*(5-d)+b)//4 - k=(60*(5-d)+b)%4 - m=(2*j-k-g+h)%7 - n=(g-h+m+114)//31 - p=(g-h+m+114)%31 - day=p+1 - month=n - return datetime.date(year, month, day) + a=year//100 + b=year%100 + c=(3*(a+25))//4 + d=(3*(a+25))%4 + e=(8*(a+11))//25 + f=(5*a+b)%19 + g=(19*f+c-e)%30 + h=(f+11*g)//319 + j=(60*(5-d)+b)//4 + k=(60*(5-d)+b)%4 + m=(2*j-k-g+h)%7 + n=(g-h+m+114)//31 + p=(g-h+m+114)%31 + day=p+1 + month=n + return datetime.date(year, month, day) def nonworking_french_public_days_of_the_year(year=None): - if year is None: - year=datetime.date.today().year - dp=easter_date(year) - return { - '1janvier': datetime.date(year, 1, 1), - 'paques': dp, - 'lundi_paques': (dp+datetime.timedelta(1)), - '1mai': datetime.date(year, 5, 1), - '8mai': datetime.date(year, 5, 8), - 'jeudi_ascension': (dp+datetime.timedelta(39)), - 'pentecote': (dp+datetime.timedelta(49)), - 'lundi_pentecote': (dp+datetime.timedelta(50)), - '14juillet': datetime.date(year, 7, 14), - '15aout': datetime.date(year, 8, 15), - '1novembre': datetime.date(year, 11, 1), - '11novembre': datetime.date(year, 11, 11), - 'noel': datetime.date(year, 12, 25), - 'saint_etienne': datetime.date(year, 12, 26), - } + if year is None: + year=datetime.date.today().year + dp=easter_date(year) + return { + '1janvier': datetime.date(year, 1, 1), + 'paques': dp, + 'lundi_paques': (dp+datetime.timedelta(1)), + '1mai': datetime.date(year, 5, 1), + '8mai': datetime.date(year, 5, 8), + 'jeudi_ascension': (dp+datetime.timedelta(39)), + 'pentecote': (dp+datetime.timedelta(49)), + 'lundi_pentecote': (dp+datetime.timedelta(50)), + '14juillet': datetime.date(year, 7, 14), + '15aout': datetime.date(year, 8, 15), + '1novembre': datetime.date(year, 11, 1), + '11novembre': datetime.date(year, 11, 11), + 'noel': datetime.date(year, 12, 25), + 'saint_etienne': datetime.date(year, 12, 26), + } def parse_exceptional_closures(values): - exceptional_closures=[] - date_pattern=re.compile('^([0-9]{2})/([0-9]{2})/([0-9]{4})$') - time_pattern=re.compile('^([0-9]{1,2})h([0-9]{2})?$') - for value in values: - days=[] - hours_periods=[] - words=value.strip().split() - for word in words: - if word=='': - continue - parts=word.split('-') - if len(parts)==1: - # ex : 31/02/2017 - ptime=time.strptime(word,'%d/%m/%Y') - date=datetime.date(ptime.tm_year, ptime.tm_mon, ptime.tm_mday) - if date not in days: - days.append(date) - elif len(parts)==2: - # ex : 18/12/2017-20/12/2017 ou 9h-10h30 - if date_pattern.match(parts[0]) and date_pattern.match(parts[1]): - # ex : 18/12/2017-20/12/2017 - pstart=time.strptime(parts[0],'%d/%m/%Y') - pstop=time.strptime(parts[1],'%d/%m/%Y') - if pstop<=pstart: - raise ValueError('Day %s <= %s' % (parts[1],parts[0])) + exceptional_closures=[] + date_pattern=re.compile('^([0-9]{2})/([0-9]{2})/([0-9]{4})$') + time_pattern=re.compile('^([0-9]{1,2})h([0-9]{2})?$') + for value in values: + days=[] + hours_periods=[] + words=value.strip().split() + for word in words: + if word=='': + continue + parts=word.split('-') + if len(parts)==1: + # ex : 31/02/2017 + ptime=time.strptime(word,'%d/%m/%Y') + date=datetime.date(ptime.tm_year, ptime.tm_mon, ptime.tm_mday) + if date not in days: + days.append(date) + elif len(parts)==2: + # ex : 18/12/2017-20/12/2017 ou 9h-10h30 + if date_pattern.match(parts[0]) and date_pattern.match(parts[1]): + # ex : 18/12/2017-20/12/2017 + pstart=time.strptime(parts[0],'%d/%m/%Y') + 
pstop=time.strptime(parts[1],'%d/%m/%Y') + if pstop<=pstart: + raise ValueError('Day %s <= %s' % (parts[1],parts[0])) - date=datetime.date(pstart.tm_year, pstart.tm_mon, pstart.tm_mday) - stop_date=datetime.date(pstop.tm_year, pstart.tm_mon, pstart.tm_mday) - while date<=stop_date: - if date not in days: - days.append(date) - date+=datetime.timedelta(days=1) - else: - # ex : 9h-10h30 - mstart=time_pattern.match(parts[0]) - mstop=time_pattern.match(parts[1]) - if not mstart or not mstop: - raise ValueError('"%s" is not a valid time period' % word) - hstart=datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0)) - hstop=datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0)) - if hstop<=hstart: - raise ValueError('Time %s <= %s' % (parts[1],parts[0])) - hours_periods.append({'start': hstart, 'stop': hstop}) - else: - raise ValueError('Invalid number of part in this word : "%s"' % word) - if not days: - raise ValueError('No days found in value "%s"' % word) - exceptional_closures.append({'days': days, 'hours_periods': hours_periods}) - return exceptional_closures + date=datetime.date(pstart.tm_year, pstart.tm_mon, pstart.tm_mday) + stop_date=datetime.date(pstop.tm_year, pstart.tm_mon, pstart.tm_mday) + while date<=stop_date: + if date not in days: + days.append(date) + date+=datetime.timedelta(days=1) + else: + # ex : 9h-10h30 + mstart=time_pattern.match(parts[0]) + mstop=time_pattern.match(parts[1]) + if not mstart or not mstop: + raise ValueError('"%s" is not a valid time period' % word) + hstart=datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0)) + hstop=datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0)) + if hstop<=hstart: + raise ValueError('Time %s <= %s' % (parts[1],parts[0])) + hours_periods.append({'start': hstart, 'stop': hstop}) + else: + raise ValueError('Invalid number of part in this word : "%s"' % word) + if not days: + raise ValueError('No days found in value "%s"' % word) + exceptional_closures.append({'days': days, 'hours_periods': hours_periods}) + return exceptional_closures def parse_normal_opening_hours(values): - normal_opening_hours=[] - time_pattern=re.compile('^([0-9]{1,2})h([0-9]{2})?$') - for value in values: - days=[] - hours_periods=[] - words=value.strip().split() - for word in words: - if word=='': - continue - parts=word.split('-') - if len(parts)==1: - # ex : jeudi - if word not in week_days: - raise ValueError('"%s" is not a valid week day' % word) - if word not in days: - days.append(word) - elif len(parts)==2: - # ex : lundi-jeudi ou 9h-10h30 - if parts[0] in week_days and parts[1] in week_days: - # ex : lundi-jeudi - if week_days.index(parts[1]) <= week_days.index(parts[0]): - raise ValueError('"%s" is before "%s"' % (parts[1],parts[0])) - started=False - for d in week_days: - if not started and d!=parts[0]: - continue - started=True - if d not in days: - days.append(d) - if d==parts[1]: - break - else: - #ex : 9h-10h30 - mstart=time_pattern.match(parts[0]) - mstop=time_pattern.match(parts[1]) - if not mstart or not mstop: - raise ValueError('"%s" is not a valid time period' % word) - hstart=datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0)) - hstop=datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0)) - if hstop<=hstart: - raise ValueError('Time %s <= %s' % (parts[1],parts[0])) - hours_periods.append({'start': hstart, 'stop': hstop}) - else: - raise ValueError('Invalid number of part in this word : "%s"' % word) - if not days and not hours_periods: - raise ValueError('No days or hours period found in 
this value : "%s"' % value) - normal_opening_hours.append({'days': days, 'hours_periods': hours_periods}) - return normal_opening_hours + normal_opening_hours=[] + time_pattern=re.compile('^([0-9]{1,2})h([0-9]{2})?$') + for value in values: + days=[] + hours_periods=[] + words=value.strip().split() + for word in words: + if word=='': + continue + parts=word.split('-') + if len(parts)==1: + # ex : jeudi + if word not in week_days: + raise ValueError('"%s" is not a valid week day' % word) + if word not in days: + days.append(word) + elif len(parts)==2: + # ex : lundi-jeudi ou 9h-10h30 + if parts[0] in week_days and parts[1] in week_days: + # ex : lundi-jeudi + if week_days.index(parts[1]) <= week_days.index(parts[0]): + raise ValueError('"%s" is before "%s"' % (parts[1],parts[0])) + started=False + for d in week_days: + if not started and d!=parts[0]: + continue + started=True + if d not in days: + days.append(d) + if d==parts[1]: + break + else: + #ex : 9h-10h30 + mstart=time_pattern.match(parts[0]) + mstop=time_pattern.match(parts[1]) + if not mstart or not mstop: + raise ValueError('"%s" is not a valid time period' % word) + hstart=datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0)) + hstop=datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0)) + if hstop<=hstart: + raise ValueError('Time %s <= %s' % (parts[1],parts[0])) + hours_periods.append({'start': hstart, 'stop': hstop}) + else: + raise ValueError('Invalid number of part in this word : "%s"' % word) + if not days and not hours_periods: + raise ValueError('No days or hours period found in this value : "%s"' % value) + normal_opening_hours.append({'days': days, 'hours_periods': hours_periods}) + return normal_opening_hours def is_closed(normal_opening_hours_values=[],exceptional_closures_values=[],nonworking_public_holidays_values=[], when=datetime.datetime.now(), on_error='raise', exceptional_closure_on_nonworking_public_days=False): - when_date=when.date() - when_time=when.time() - when_weekday=week_days[when.timetuple().tm_wday] - on_error_result=None - if on_error=='closed': - on_error_result={'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False} - elif on_error=='opened': - on_error_result={'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} + when_date=when.date() + when_time=when.time() + when_weekday=week_days[when.timetuple().tm_wday] + on_error_result=None + if on_error=='closed': + on_error_result={'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False} + elif on_error=='opened': + on_error_result={'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} - logging.debug("%s => %s / %s / %s" % (when, when_date, when_time, when_weekday)) - if len(nonworking_public_holidays_values)>0: - logging.debug("Nonworking public holidays : %s" % nonworking_public_holidays_values) - nonworking_days=nonworking_french_public_days_of_the_year() - for day in nonworking_public_holidays_values: - if day in nonworking_days and when_date==nonworking_days[day]: - logging.debug("Non working day : %s" % day) - return {'closed': True, 'exceptional_closure': exceptional_closure_on_nonworking_public_days, 'exceptional_closure_all_day': exceptional_closure_on_nonworking_public_days} + logging.debug("%s => %s / %s / %s" % (when, when_date, when_time, when_weekday)) + if len(nonworking_public_holidays_values)>0: + logging.debug("Nonworking public holidays : %s" % nonworking_public_holidays_values) + 
nonworking_days=nonworking_french_public_days_of_the_year() + for day in nonworking_public_holidays_values: + if day in nonworking_days and when_date==nonworking_days[day]: + logging.debug("Non working day : %s" % day) + return {'closed': True, 'exceptional_closure': exceptional_closure_on_nonworking_public_days, 'exceptional_closure_all_day': exceptional_closure_on_nonworking_public_days} - if len(exceptional_closures_values)>0: - try: - exceptional_closures=parse_exceptional_closures(exceptional_closures_values) - logging.debug('Exceptional closures : %s' % exceptional_closures) - except Exception, e: - logging.error("%s => Not closed by default" % e) - if on_error_result is None: - raise e - return on_error_result - for cl in exceptional_closures: - if when_date not in cl['days']: - logging.debug("when_date (%s) no in days (%s)" % (when_date,cl['days'])) - continue - if not cl['hours_periods']: - # All day exceptional closure - return {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': True} - for hp in cl['hours_periods']: - if hp['start']<=when_time and hp['stop']>= when_time: - return {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': False} + if len(exceptional_closures_values)>0: + try: + exceptional_closures=parse_exceptional_closures(exceptional_closures_values) + logging.debug('Exceptional closures : %s' % exceptional_closures) + except Exception, e: + logging.error("%s => Not closed by default" % e) + if on_error_result is None: + raise e + return on_error_result + for cl in exceptional_closures: + if when_date not in cl['days']: + logging.debug("when_date (%s) no in days (%s)" % (when_date,cl['days'])) + continue + if not cl['hours_periods']: + # All day exceptional closure + return {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': True} + for hp in cl['hours_periods']: + if hp['start']<=when_time and hp['stop']>= when_time: + return {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': False} - if len(normal_opening_hours_values)>0: - try: - normal_opening_hours=parse_normal_opening_hours(normal_opening_hours_values) - logging.debug('Normal opening hours : %s' % normal_opening_hours) - except Exception, e: - logging.error("%s => Not closed by default" % e) - if on_error_result is None: - raise e - return on_error_result - for oh in normal_opening_hours: - if oh['days'] and when_weekday not in oh['days']: - logging.debug("when_weekday (%s) no in days (%s)" % (when_weekday,oh['days'])) - continue - if not oh['hours_periods']: - # All day opened - return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} - for hp in oh['hours_periods']: - if hp['start']<=when_time and hp['stop']>= when_time: - return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} - logging.debug("Not in normal opening hours => closed") - return {'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False} + if len(normal_opening_hours_values)>0: + try: + normal_opening_hours=parse_normal_opening_hours(normal_opening_hours_values) + logging.debug('Normal opening hours : %s' % normal_opening_hours) + except Exception, e: + logging.error("%s => Not closed by default" % e) + if on_error_result is None: + raise e + return on_error_result + for oh in normal_opening_hours: + if oh['days'] and when_weekday not in oh['days']: + logging.debug("when_weekday (%s) no in days (%s)" % (when_weekday,oh['days'])) + continue + if not 
oh['hours_periods']: + # All day opened + return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} + for hp in oh['hours_periods']: + if hp['start']<=when_time and hp['stop']>= when_time: + return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} + logging.debug("Not in normal opening hours => closed") + return {'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False} - # Not a nonworking day, not during exceptional closure and no normal opening hours defined => Opened - return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False} + # Not a nonworking day, not during exceptional closure and no normal opening hours defined => Opened + return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}
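# ---------------------------------------------------------------------------
# Usage sketch for HashMap.map(), following the mapping-configuration comment
# at the top of HashMap.py. The src/config names below are hypothetical.
# Notes: the module-level map() shadows the built-in map() inside HashMap.py,
# and the dst={} default argument is shared between calls, so a fresh dict is
# passed explicitly here. In the bundled __main__ test, the 'keys' entries
# reference 'ligne_*' source keys while the sample src spells them 'line_*',
# so those mappings come back empty there; matching names are used below.
# ---------------------------------------------------------------------------
import logging
import HashMap

logging.basicConfig(level=logging.DEBUG)

src = {
    'uid': 'hmartin',
    'firstname': 'Henri',
    'lastname': 'Martin',
    'email': ' H.MARTIN@GMAIL.COM ',
}

config = {
    'uid':  {'order': 0, 'key': 'uid', 'required': True},
    # 'disp_name' is absent from src, so the 'or' alternative is used:
    # it joins the 'firstname' and 'lastname' values with a space.
    'cn':   {'order': 1, 'key': 'disp_name',
             'or': {'keys': ['firstname', 'lastname'], 'join': ' '}},
    'mail': {'order': 2, 'key': 'email',
             'convert': lambda x: x.lower().strip()},
}

result = HashMap.map(config, src, dst={})
# Each destination key maps to a *list* of values, e.g.:
# {'uid': ['hmartin'], 'cn': ['Henri Martin'], 'mail': ['h.martin@gmail.com']}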
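# ---------------------------------------------------------------------------
# Note on MyDB.doSelect(): everything after the first `return results` is
# unreachable, and if it did run it would raise IndexError because `ret`
# starts empty. A working version of the row-to-list conversion that the
# dead code appears to aim for could be as simple as the sketch below.
# ---------------------------------------------------------------------------
def rows_to_lists(results):
    # Convert the tuple-of-tuples returned by cursor.fetchall()
    # into a list of mutable lists, one per row.
    return [list(row) for row in results]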
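# ---------------------------------------------------------------------------
# Usage sketch for the PgDB SQL helpers (table, column names and credentials
# below are made up). insert()/update()/delete()/select() build their SQL by
# quoting values with _quote_value() (string values are quoted by doubling
# single quotes); doSQL() also accepts a params tuple, so parameterized
# queries are possible when the driver uses %s placeholders (the
# set_client_encoding() call suggests a psycopg2-style connection). Two
# caveats visible in the hunks above: select() appends the ORDER clause as
# "ORDER %s" (no BY keyword, no leading space), and it calls doSelect(sql)
# although doSelect() is declared with a mandatory params argument.
# ---------------------------------------------------------------------------
from PgDB import PgDB

db = PgDB('localhost', 'pguser', 'secret', 'mydb')
db.connect()
db.setEncoding('UTF8')

# Helper-built statements:
db.insert('contacts', {'uid': 'hmartin', 'mail': 'h.martin@example.org'})
db.update('contacts', {'mail': 'henri.martin@example.org'}, {'uid': 'hmartin'})
db.delete('contacts', {'uid': 'hmartin'})

# Parameterized form, bypassing the string building entirely:
db.doSQL("INSERT INTO contacts (uid, mail) VALUES (%s, %s)",
         ('hmartin', 'h.martin@example.org'))

db.close()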
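# ---------------------------------------------------------------------------
# Quick check of opening_hours.easter_date() and the holidays derived from it
# (values cross-checked against the 2024 French calendar):
# ---------------------------------------------------------------------------
from opening_hours import easter_date, nonworking_french_public_days_of_the_year

print(easter_date(2024))            # 2024-03-31
holidays = nonworking_french_public_days_of_the_year(2024)
print(holidays['lundi_paques'])     # 2024-04-01
print(holidays['jeudi_ascension'])  # 2024-05-09
print(holidays['lundi_pentecote'])  # 2024-05-20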
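# ---------------------------------------------------------------------------
# Usage sketch for opening_hours.is_closed() (example values are made up).
# Week days and day ranges use the French names from week_days, times use the
# "9h" / "9h30" syntax, exceptional closures use "DD/MM/YYYY" dates, and
# public holidays are referenced by the keys returned by
# nonworking_french_public_days_of_the_year(). Note that is_closed() looks
# the holidays up in the current year (it calls the helper without a year
# argument) and that its `when` default is evaluated once at import time,
# so `when` is passed explicitly here.
# ---------------------------------------------------------------------------
import datetime
from opening_hours import is_closed

normal = ['lundi-vendredi 9h-12h30 14h-18h']
closures = ['24/12/2025 14h-18h']
holidays = ['1janvier', 'paques', '1mai', 'noel']

# Wednesday 2025-12-24 at 15:00 falls inside the exceptional closure:
print(is_closed(normal, closures, holidays,
                when=datetime.datetime(2025, 12, 24, 15, 0)))
# -> {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': False}

# Tuesday 2025-12-23 at 10:00 is within the normal opening hours:
print(is_closed(normal, closures, holidays,
                when=datetime.datetime(2025, 12, 23, 10, 0)))
# -> {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}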