Code cleaning
Commit 6bbacce38a (parent ad31ddc699)
15 changed files with 186 additions and 124 deletions
.pylintrc (Normal file, 6 additions)
@@ -0,0 +1,6 @@
[MESSAGES CONTROL]
disable=invalid-name,
    locally-disabled,
    too-many-arguments,
    too-many-branches,
    line-too-long,
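The new .pylintrc disables a handful of checks globally (naming, argument and branch counts, line length). A minimal sketch of checking the package against this configuration, assuming pylint is installed and that the package directory is named mylib (the package name is an assumption, not shown in this hunk):

    # Sketch only: run the pylint CLI against the new rcfile.
    # "mylib" is an assumed package name; adjust to the actual package directory.
    import subprocess

    subprocess.run(["pylint", "--rcfile=.pylintrc", "mylib"], check=False)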
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

""" Email helpers """
""" Email client to forge and send emails """

import logging
import os
@@ -1,4 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

""" LDAP server connection helper """

import copy
import datetime
@@ -12,7 +14,9 @@ from ldap.controls.simple import RelaxRulesControl
import ldap.modlist as modlist
import pytz

class LdapServer(object): # pylint: disable=useless-object-inheritance

class LdapServer:
    """ LDAP server connection helper """ # pylint: disable=useless-object-inheritance

    uri = None
    dn = None
@@ -39,6 +43,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
        self.logger.log(level, error)

    def connect(self):
        """ Start connection to LDAP server """
        if self.con == 0:
            try:
                con = ldap.initialize(self.uri)
@@ -61,6 +66,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance

    @staticmethod
    def get_scope(scope):
        """ Map scope parameter to python-ldap value """
        if scope == 'base':
            return ldap.SCOPE_BASE # pylint: disable=no-member
        if scope == 'one':
@@ -70,6 +76,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
        raise Exception("Unknown LDAP scope '%s'" % scope)

    def search(self, basedn, filterstr=None, attrs=None, sizelimit=0, scope=None):
        """ Run a search on LDAP server """
        res_id = self.con.search(
            basedn,
            self.get_scope(scope if scope else 'sub'),
@@ -88,10 +95,12 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
        return ret

    def get_object(self, dn, filterstr=None, attrs=None):
        """ Retrieve a LDAP object specified by its DN """
        result = self.search(dn, filterstr=filterstr, scope='base', attrs=attrs)
        return result[dn] if dn in result else None

    def paged_search(self, basedn, filterstr, attrs, scope='sub', pagesize=500):
        """ Run a paged search on LDAP server """
        assert not self.v2, "Paged search is not available on LDAP version 2"
        # Initialize SimplePagedResultsControl object
        page_control = SimplePagedResultsControl(
@@ -162,6 +171,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
        return ret

    def add_object(self, dn, attrs):
        """ Add an object in LDAP directory """
        ldif = modlist.addModlist(attrs)
        try:
            self.logger.debug("LdapServer - Add %s", dn)
@@ -173,6 +183,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
            return False

    def update_object(self, dn, old, new, ignore_attrs=None, relax=False):
        """ Update an object in LDAP directory """
        assert not relax or not self.v2, "Relax modification is not available on LDAP version 2"
        ldif = modlist.modifyModlist(
            old, new,
@@ -192,6 +203,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance

    @staticmethod
    def update_need(old, new, ignore_attrs=None):
        """ Check if an update is need on a LDAP object based on its old and new attributes values """
        ldif = modlist.modifyModlist(
            old, new,
            ignore_attr_types=ignore_attrs if ignore_attrs else []
@@ -202,6 +214,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance

    @staticmethod
    def get_changes(old, new, ignore_attrs=None):
        """ Retrieve changes (as modlist) on an object based on its old and new attributes values """
        return modlist.modifyModlist(
            old, new,
            ignore_attr_types=ignore_attrs if ignore_attrs else []
@@ -209,6 +222,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance

    @staticmethod
    def format_changes(old, new, ignore_attrs=None, prefix=None):
        """ Format changes (modlist) on an object based on its old and new attributes values to display/log it """
        msg = []
        for (op, attr, val) in modlist.modifyModlist(old, new, ignore_attr_types=ignore_attrs if ignore_attrs else []):
            if op == ldap.MOD_ADD: # pylint: disable=no-member
@@ -226,6 +240,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
        return '\n'.join(msg)

    def rename_object(self, dn, new_rdn, new_sup=None, delete_old=True):
        """ Rename an object in LDAP directory """
        # If new_rdn is a complete DN, split new RDN and new superior DN
        if len(new_rdn.split(',')) > 1:
            self.logger.debug(
@@ -261,6 +276,7 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
            return False

    def drop_object(self, dn):
        """ Drop an object in LDAP directory """
        try:
            self.logger.debug("LdapServer - Delete %s", dn)
            self.con.delete_s(dn)
@@ -272,10 +288,12 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance

    @staticmethod
    def get_dn(obj):
        """ Retreive an on object DN from its entry in LDAP search result """
        return obj[0][0]

    @staticmethod
    def get_attr(obj, attr, all=None, default=None):
        """ Retreive an on object attribute value(s) from the object entry in LDAP search result """
        if attr not in obj:
            for k in obj:
                if k.lower() == attr.lower():
@@ -289,13 +307,18 @@ class LdapServer(object): # pylint: disable=useless-object-inheritance
            return obj[attr][0]
        return default


class LdapServerException(BaseException):
    """ Generic exception raised by LdapServer """

    def __init__(self, msg):
        BaseException.__init__(self, msg)

#
# Helpers
# LDAP date string helpers
#


def parse_datetime(value, to_timezone=None, default_timezone=None, naive=None):
    """
    Convert LDAP date string to datetime.datetime object
@@ -335,6 +358,7 @@ def parse_datetime(value, to_timezone=None, default_timezone=None, naive=None):
        return date.astimezone(to_timezone)
    return date


def parse_date(value, to_timezone=None, default_timezone=None, naive=None):
    """
    Convert LDAP date string to datetime.date object
@@ -348,6 +372,7 @@ def parse_date(value, to_timezone=None, default_timezone=None, naive=None):
    """
    return parse_datetime(value, to_timezone, default_timezone, naive).date()


def format_datetime(value, from_timezone=None, to_timezone=None, naive=None):
    """
    Convert datetime.datetime object to LDAP date string
@@ -388,6 +413,7 @@ def format_datetime(value, from_timezone=None, to_timezone=None, naive=None):
    datestring = datestring.replace('+0000', 'Z')
    return datestring


def format_date(value, from_timezone=None, to_timezone=None, naive=None):
    """
    Convert datetime.date object to LDAP date string
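The hunks above show only fragments of LdapServer. As a rough usage sketch (the import path and the constructor arguments are assumptions; only the method names and signatures visible in this diff come from the code):

    # Usage sketch; mylib.ldap and the LdapServer('uri') constructor are assumptions.
    from mylib.ldap import LdapServer, parse_datetime

    srv = LdapServer('ldap://ldap.example.org')
    srv.connect()

    # search() returns a dict keyed by DN (see get_object() above)
    entries = srv.search('ou=people,dc=example,dc=org', filterstr='(uid=jdoe)', attrs=['uid', 'createTimestamp'])
    for dn, attrs in entries.items():
        uid = LdapServer.get_attr(attrs, 'uid')
        created = parse_datetime(LdapServer.get_attr(attrs, 'createTimestamp'))
        print(dn, uid, created)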
@@ -9,6 +9,7 @@ import MySQLdb

log = logging.getLogger(__name__)


class MyDB:
    """ MySQL client """
@@ -1,3 +1,7 @@
# -*- coding: utf-8 -*-

""" Opening hours helpers """

import datetime
import re
import time
@@ -10,6 +14,7 @@ date_format = '%d/%m/%Y'
date_pattern = re.compile('^([0-9]{2})/([0-9]{2})/([0-9]{4})$')
time_pattern = re.compile('^([0-9]{1,2})h([0-9]{2})?$')


def easter_date(year):
    a = year // 100
    b = year % 100
@@ -28,6 +33,7 @@ def easter_date(year):
        month = n
    return datetime.date(year, month, day)


def nonworking_french_public_days_of_the_year(year=None):
    if year is None:
        year = datetime.date.today().year
@@ -49,6 +55,7 @@ def nonworking_french_public_days_of_the_year(year=None):
        'saint_etienne': datetime.date(year, 12, 26),
    }


def parse_exceptional_closures(values):
    exceptional_closures = []
    for value in values:
@@ -106,7 +113,7 @@ def parse_normal_opening_hours(values):
        hours_periods = []
        words = value.strip().split()
        for word in words:
            if word=='':
            if not word:
                continue
            parts = word.split('-')
            if len(parts) == 1:
@@ -148,9 +155,12 @@ def parse_normal_opening_hours(values):
        normal_opening_hours.append({'days': days, 'hours_periods': hours_periods})
    return normal_opening_hours

def is_closed(normal_opening_hours_values=None, exceptional_closures_values=None,

def is_closed(
        normal_opening_hours_values=None, exceptional_closures_values=None,
        nonworking_public_holidays_values=None, exceptional_closure_on_nonworking_public_days=False,
        when=None, on_error='raise'):
        when=None, on_error='raise'
):
    if not when:
        when = datetime.datetime.now()
    when_date = when.date()
@@ -171,11 +181,11 @@ def is_closed(normal_opening_hours_values=None, exceptional_closures_values=None
            log.debug("Non working day: %s", day)
            return {'closed': True, 'exceptional_closure': exceptional_closure_on_nonworking_public_days, 'exceptional_closure_all_day': exceptional_closure_on_nonworking_public_days}

    if len(exceptional_closures_values)>0:
    if exceptional_closures_values:
        try:
            exceptional_closures = parse_exceptional_closures(exceptional_closures_values)
            log.debug('Exceptional closures: %s', exceptional_closures)
        except Exception as e:
        except ValueError as e:
            log.error("Fail to parse exceptional closures, consider as closed", exc_info=True)
            if on_error_result is None:
                raise e from e
@@ -195,7 +205,7 @@ def is_closed(normal_opening_hours_values=None, exceptional_closures_values=None
        try:
            normal_opening_hours = parse_normal_opening_hours(normal_opening_hours_values)
            log.debug('Normal opening hours: %s', normal_opening_hours)
        except Exception:
        except ValueError as e: # pylint: disable=broad-except
            log.error("Fail to parse normal opening hours, consider as closed", exc_info=True)
            if on_error_result is None:
                raise e from e
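Based on the is_closed() signature above and the sample values used by the tests at the end of this commit, a usage sketch (the import path is an assumption):

    # Usage sketch; the mylib import path is assumed, the values mirror the tests below.
    import datetime
    from mylib import opening_hours

    status = opening_hours.is_closed(
        normal_opening_hours_values=["lundi-mardi jeudi 9h30-12h30 14h-16h30"],
        exceptional_closures_values=["22/09/2017", "26/11/2017 9h30-12h30"],
        nonworking_public_holidays_values=["saint_etienne"],
        when=datetime.datetime(2017, 9, 22, 10, 30),
    )
    # is_closed() returns a dict with 'closed', 'exceptional_closure' and
    # 'exceptional_closure_all_day' keys (see the return statement above)
    print(status['closed'], status['exceptional_closure'])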
@@ -8,7 +8,8 @@ import progressbar

log = logging.getLogger(__name__)

class Pbar(object): # pylint: disable=useless-object-inheritance

class Pbar: # pylint: disable=useless-object-inheritance
    """
    Progress bar
@@ -10,6 +10,7 @@ import psycopg2

log = logging.getLogger(__name__)


class PgDB:
    """ PostgreSQL client """
@@ -34,7 +35,11 @@ class PgDB:
        """ Connect to PostgreSQL server """
        if self.con == 0:
            try:
                con = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % (self.db,self.user,self.host,self.pwd))
                con = psycopg2.connect(
                    "dbname='%s' user='%s' host='%s' password='%s'" % (
                        self.db, self.user, self.host, self.pwd
                    )
                )
                self.con = con
            except Exception:
                log.fatal('An error occured during Postgresql database connection.', exc_info=1)
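The reformatted connect() above still interpolates credentials into a DSN string. An alternative, not used by this commit, is psycopg2's keyword-argument form, which avoids quoting issues:

    # Alternative sketch (not part of this commit): keyword arguments instead of a DSN string.
    import psycopg2

    con = psycopg2.connect(dbname='mydb', user='myuser', host='db.example.org', password='secret')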
@@ -8,7 +8,8 @@ import logging

log = logging.getLogger(__name__)

class Report(object): # pylint: disable=useless-object-inheritance

class Report: # pylint: disable=useless-object-inheritance
    """ Logging report """

    content = []
@@ -5,8 +5,8 @@ import datetime
import logging
import sys

import argparse
import getpass
from mako.template import Template as MakoTemplate

from mylib.scripts.helpers import get_opts_parser, add_email_opts
from mylib.scripts.helpers import init_logging, init_email_client

@@ -14,6 +14,7 @@ from mylib.scripts.helpers import init_logging, init_email_client

log = logging.getLogger('mylib.scripts.email_test')


def main(argv=None): # pylint: disable=too-many-locals,too-many-statements
    """ Script main """
    if argv is None:
@@ -9,8 +9,9 @@ from mylib.email import EmailClient

log = logging.getLogger(__name__)


def init_logging(options, name, report=None):
    """ Initialize logs """
    """ Initialize logging from calling script options """
    logformat = '%(asctime)s - ' + name + ' - %(levelname)s - %(message)s'
    if options.debug:
        loglevel = logging.DEBUG

@@ -172,6 +173,7 @@ def add_email_opts(parser):


def init_email_client(options, **kwargs):
    """ Initialize email client from calling script options """
    log.info('Initialize Email client')
    return EmailClient(
        smtp_host=options.email_smtp_host,
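The script helpers changed here are used together by the test scripts below; a condensed sketch, based only on the names and signatures visible in this diff (anything not shown here is an assumption):

    # Sketch of how the test scripts wire the helpers together.
    from mylib.scripts.helpers import get_opts_parser, add_email_opts
    from mylib.scripts.helpers import init_logging, init_email_client

    parser = get_opts_parser(just_try=True)
    add_email_opts(parser)
    options = parser.parse_args()

    init_logging(options, 'My script')          # sets up logging from the parsed options
    email_client = init_email_client(options)   # returns an EmailClient built from the options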
@@ -15,6 +15,7 @@ from mylib.scripts.helpers import init_logging

log = logging.getLogger('mylib.scripts.ldap_test')


def main(argv=None): # pylint: disable=too-many-locals,too-many-statements
    """ Script main """
    if argv is None:
@@ -24,6 +25,9 @@ def main(argv=None): #pylint: disable=too-many-locals,too-many-statements
    parser = get_opts_parser(just_try=True)
    options = parser.parse_args()

    # Initialize logs
    init_logging(options, 'Test LDAP helpers')

    now = datetime.datetime.now().replace(tzinfo=dateutil.tz.tzlocal())
    print("Now = %s" % now)
@@ -50,7 +54,6 @@ def main(argv=None): #pylint: disable=too-many-locals,too-many-statements
    print("format_date (to_timezone=Tokyo) : %s" % format_date(now, to_timezone='Asia/Tokyo'))
    print("format_date (naive=True) : %s" % format_date(now, naive=True))


    print("parse_datetime : %s" % parse_datetime(datestring_now))
    print("parse_datetime (default_timezone=utc) : %s" % parse_datetime(datestring_now[0:-1], default_timezone=pytz.utc))
    print("parse_datetime (default_timezone=local) : %s" % parse_datetime(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal()))
@@ -12,6 +12,7 @@ from mylib.scripts.helpers import init_logging

log = logging.getLogger('mylib.scripts.pbar_test')


def main(argv=None): # pylint: disable=too-many-locals,too-many-statements
    """ Script main """
    if argv is None:
@@ -11,6 +11,7 @@ from mylib.scripts.helpers import init_logging, init_email_client

log = logging.getLogger('mylib.scripts.report_test')


def main(argv=None): # pylint: disable=too-many-locals,too-many-statements
    """ Script main """
    if argv is None:
setup.cfg (Normal file, 2 additions)
@@ -0,0 +1,2 @@
[flake8]
ignore = E501
@@ -43,6 +43,7 @@ def test_parse_exceptional_closures_full_days_period():
        }
    ]


def test_parse_exceptional_closures_invalid_days_period():
    with pytest.raises(ValueError):
        opening_hours.parse_exceptional_closures(["22/09/2017-21/09/2017"])

@@ -156,6 +157,7 @@ def test_parse_normal_opening_hours_multiple_periods():
# Tests on is_closed
#


exceptional_closures = ["22/09/2017", "20/09/2017-22/09/2017", "20/09/2017-22/09/2017 18/09/2017", "25/11/2017", "26/11/2017 9h30-12h30"]
normal_opening_hours = ["lundi-mardi jeudi 9h30-12h30 14h-16h30", "mercredi vendredi 9h30-12h30 14h-17h"]
nonworking_public_holidays = [