Compare commits
No commits in common. "83ce6b9d1bad83004deb8f550e0240d35d7cae36" and "815081d544f78fffca717e065602539631dd07e8" have entirely different histories.
83ce6b9d1b ... 815081d544
11 changed files with 102 additions and 161 deletions

@@ -49,8 +49,8 @@ def pretty_format_dict(value, encoding='utf8', prefix=None):
     result = []
     for key in sorted(value.keys()):
         result.append(
-            f'{prefix}- {key} : '
-            + pretty_format_value_in_list(
+            f'{prefix}- {key} : ' +
+            pretty_format_value_in_list(
                 value[key],
                 encoding=encoding,
                 prefix=prefix
@@ -65,8 +65,8 @@ def pretty_format_list(row, encoding='utf8', prefix=None):
     result = []
     for idx, values in enumerate(row):
         result.append(
-            f'{prefix}- #{idx} : '
-            + pretty_format_value_in_list(
+            f'{prefix}- #{idx} : ' +
+            pretty_format_value_in_list(
                 values,
                 encoding=encoding,
                 prefix=prefix
@@ -135,10 +135,10 @@ class EmailClient(ConfigurableObject): # pylint: disable=useless-object-inherit
         msg['Date'] = email.utils.formatdate(None, True)
         encoding = encoding if encoding else self._get_option('encoding')
         if template:
-            assert template in self.templates, f'Unknwon template {template}'
+            assert template in self.templates, "Unknwon template %s" % template
             # Handle subject from template
             if not subject:
-                assert self.templates[template].get('subject'), f'No subject defined in template {template}'
+                assert self.templates[template].get('subject'), 'No subject defined in template %s' % template
                 msg['Subject'] = self.templates[template]['subject'].format(**template_vars)

             # Put HTML part in last one to prefered it
@@ -168,18 +168,14 @@ class EmailClient(ConfigurableObject): # pylint: disable=useless-object-inherit
                     part = MIMEBase('application', "octet-stream")
                     part.set_payload(fp.read())
                     encode_base64(part)
-                    part.add_header(
-                        'Content-Disposition',
-                        f'attachment; filename="{os.path.basename(filepath)}"')
+                    part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(filepath))
                     msg.attach(part)
         if attachment_payloads:
             for filename, payload in attachment_payloads:
                 part = MIMEBase('application', "octet-stream")
                 part.set_payload(payload)
                 encode_base64(part)
-                part.add_header(
-                    'Content-Disposition',
-                    f'attachment; filename="{filename}"')
+                part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
                 msg.attach(part)
         return msg

@@ -82,7 +82,7 @@ def parse_exceptional_closures(values):
                 pstart = time.strptime(parts[0], date_format)
                 pstop = time.strptime(parts[1], date_format)
                 if pstop <= pstart:
-                    raise ValueError(f'Day {parts[1]} <= {parts[0]}')
+                    raise ValueError('Day %s <= %s' % (parts[1], parts[0]))

                 date = datetime.date(pstart.tm_year, pstart.tm_mon, pstart.tm_mday)
                 stop_date = datetime.date(pstop.tm_year, pstop.tm_mon, pstop.tm_mday)
@@ -95,16 +95,16 @@ def parse_exceptional_closures(values):
                 mstart = time_pattern.match(parts[0])
                 mstop = time_pattern.match(parts[1])
                 if not mstart or not mstop:
-                    raise ValueError(f'"{word}" is not a valid time period')
+                    raise ValueError('"%s" is not a valid time period' % word)
                 hstart = datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0))
                 hstop = datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0))
                 if hstop <= hstart:
-                    raise ValueError(f'Time {parts[1]} <= {parts[0]}')
+                    raise ValueError('Time %s <= %s' % (parts[1], parts[0]))
                 hours_periods.append({'start': hstart, 'stop': hstop})
             else:
-                raise ValueError(f'Invalid number of part in this word: "{word}"')
+                raise ValueError('Invalid number of part in this word: "%s"' % word)
         if not days:
-            raise ValueError(f'No days found in value "{value}"')
+            raise ValueError('No days found in value "%s"' % value)
         exceptional_closures.append({'days': days, 'hours_periods': hours_periods})
     return exceptional_closures

@@ -123,7 +123,7 @@ def parse_normal_opening_hours(values):
             if len(parts) == 1:
                 # ex: jeudi
                 if word not in week_days:
-                    raise ValueError(f'"{word}" is not a valid week day')
+                    raise ValueError('"%s" is not a valid week day' % word)
                 if word not in days:
                     days.append(word)
             elif len(parts) == 2:
@@ -131,7 +131,7 @@ def parse_normal_opening_hours(values):
                 if parts[0] in week_days and parts[1] in week_days:
                     # ex: lundi-jeudi
                     if week_days.index(parts[1]) <= week_days.index(parts[0]):
-                        raise ValueError(f'"{parts[1]}" is before "{parts[0]}"')
+                        raise ValueError('"%s" is before "%s"' % (parts[1], parts[0]))
                     started = False
                     for d in week_days:
                         if not started and d != parts[0]:
@@ -146,16 +146,16 @@ def parse_normal_opening_hours(values):
                     mstart = time_pattern.match(parts[0])
                     mstop = time_pattern.match(parts[1])
                     if not mstart or not mstop:
-                        raise ValueError(f'"{word}" is not a valid time period')
+                        raise ValueError('"%s" is not a valid time period' % word)
                     hstart = datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0))
                     hstop = datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0))
                     if hstop <= hstart:
-                        raise ValueError(f'Time {parts[1]} <= {parts[0]}')
+                        raise ValueError('Time %s <= %s' % (parts[1], parts[0]))
                     hours_periods.append({'start': hstart, 'stop': hstop})
             else:
-                raise ValueError(f'Invalid number of part in this word: "{word}"')
+                raise ValueError('Invalid number of part in this word: "%s"' % word)
         if not days and not hours_periods:
-            raise ValueError(f'No days or hours period found in this value: "{value}"')
+            raise ValueError('No days or hours period found in this value: "%s"' % value)
         normal_opening_hours.append({'days': days, 'hours_periods': hours_periods})
     return normal_opening_hours

@@ -173,28 +173,18 @@ def is_closed(
     when_weekday = week_days[when.timetuple().tm_wday]
     on_error_result = None
     if on_error == 'closed':
-        on_error_result = {
-            'closed': True, 'exceptional_closure': False,
-            'exceptional_closure_all_day': False}
+        on_error_result = {'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False}
     elif on_error == 'opened':
-        on_error_result = {
-            'closed': False, 'exceptional_closure': False,
-            'exceptional_closure_all_day': False}
+        on_error_result = {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}

-    log.debug(
-        "When = %s => date = %s / time = %s / week day = %s",
-        when, when_date, when_time, when_weekday)
+    log.debug("When = %s => date = %s / time = %s / week day = %s", when, when_date, when_time, when_weekday)
     if nonworking_public_holidays_values:
         log.debug("Nonworking public holidays: %s", nonworking_public_holidays_values)
         nonworking_days = nonworking_french_public_days_of_the_year()
         for day in nonworking_public_holidays_values:
             if day in nonworking_days and when_date == nonworking_days[day]:
                 log.debug("Non working day: %s", day)
-                return {
-                    'closed': True,
-                    'exceptional_closure': exceptional_closure_on_nonworking_public_days,
-                    'exceptional_closure_all_day': exceptional_closure_on_nonworking_public_days
-                }
+                return {'closed': True, 'exceptional_closure': exceptional_closure_on_nonworking_public_days, 'exceptional_closure_all_day': exceptional_closure_on_nonworking_public_days}

     if exceptional_closures_values:
         try:
@@ -211,14 +201,10 @@ def is_closed(
                 continue
             if not cl['hours_periods']:
                 # All day exceptional closure
-                return {
-                    'closed': True, 'exceptional_closure': True,
-                    'exceptional_closure_all_day': True}
+                return {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': True}
             for hp in cl['hours_periods']:
                 if hp['start'] <= when_time <= hp['stop']:
-                    return {
-                        'closed': True, 'exceptional_closure': True,
-                        'exceptional_closure_all_day': False}
+                    return {'closed': True, 'exceptional_closure': True, 'exceptional_closure_all_day': False}

     if normal_opening_hours_values:
         try:
@@ -235,21 +221,12 @@ def is_closed(
                 continue
             if not oh['hours_periods']:
                 # All day opened
-                return {
-                    'closed': False, 'exceptional_closure': False,
-                    'exceptional_closure_all_day': False}
+                return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}
             for hp in oh['hours_periods']:
                 if hp['start'] <= when_time <= hp['stop']:
-                    return {
-                        'closed': False, 'exceptional_closure': False,
-                        'exceptional_closure_all_day': False}
+                    return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}
         log.debug("Not in normal opening hours => closed")
-        return {
-            'closed': True, 'exceptional_closure': False,
-            'exceptional_closure_all_day': False}
+        return {'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False}

-    # Not a nonworking day, not during exceptional closure and no normal opening
-    # hours defined => Opened
-    return {
-        'closed': False, 'exceptional_closure': False,
-        'exceptional_closure_all_day': False}
+    # Not a nonworking day, not during exceptional closure and no normal opening hours defined => Opened
+    return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}

@@ -14,7 +14,7 @@ log = logging.getLogger(__name__)

 def init_logging(options, name, report=None):
     """ Initialize logging from calling script options """
-    logformat = f'%(asctime)s - {name} - %(levelname)s - %(message)s'
+    logformat = '%(asctime)s - ' + name + ' - %(levelname)s - %(message)s'
     if options.debug:
         loglevel = logging.DEBUG
     elif options.verbose:
@@ -64,9 +64,7 @@ def get_opts_parser(desc=None, just_try=False, just_one=False, progress=False, c
         action="store",
         type=str,
         dest="logfile",
-        help=(
-            'Log file path (default: '
-            f'{get_default_opt_value(config, default_config, "logfile")})'),
+        help="Log file path (default: %s)" % get_default_opt_value(config, default_config, 'logfile'),
         default=get_default_opt_value(config, default_config, 'logfile')
     )

@@ -111,7 +109,7 @@ def add_email_opts(parser, config=None):
     default_config = dict(
         smtp_host="127.0.0.1", smtp_port=25, smtp_ssl=False, smtp_tls=False, smtp_user=None,
         smtp_password=None, smtp_debug=False, email_encoding=sys.getdefaultencoding(),
-        sender_name=getpass.getuser(), sender_email=f'{getpass.getuser()}@{socket.gethostname()}',
+        sender_name=getpass.getuser(), sender_email=getpass.getuser() + '@' + socket.gethostname(),
         catch_all=False
     )

@@ -120,9 +118,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_smtp_host",
-        help=(
-            'SMTP host (default: '
-            f'{get_default_opt_value(config, default_config, "smtp_host")})'),
+        help="SMTP host (default: %s)" % get_default_opt_value(config, default_config, 'smtp_host'),
         default=get_default_opt_value(config, default_config, 'smtp_host')
     )

@@ -131,7 +127,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=int,
         dest="email_smtp_port",
-        help=f'SMTP port (default: {get_default_opt_value(config, default_config, "smtp_port")})',
+        help="SMTP port (default: %s)" % get_default_opt_value(config, default_config, 'smtp_port'),
         default=get_default_opt_value(config, default_config, 'smtp_port')
     )

@@ -139,7 +135,7 @@ def add_email_opts(parser, config=None):
         '--smtp-ssl',
         action="store_true",
         dest="email_smtp_ssl",
-        help=f'Use SSL (default: {get_default_opt_value(config, default_config, "smtp_ssl")})',
+        help="Use SSL (default: %s)" % get_default_opt_value(config, default_config, 'smtp_ssl'),
         default=get_default_opt_value(config, default_config, 'smtp_ssl')
     )

@@ -147,7 +143,7 @@ def add_email_opts(parser, config=None):
         '--smtp-tls',
         action="store_true",
         dest="email_smtp_tls",
-        help=f'Use TLS (default: {get_default_opt_value(config, default_config, "smtp_tls")})',
+        help="Use TLS (default: %s)" % get_default_opt_value(config, default_config, 'smtp_tls'),
         default=get_default_opt_value(config, default_config, 'smtp_tls')
     )

@@ -156,7 +152,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_smtp_user",
-        help=f'SMTP username (default: {get_default_opt_value(config, default_config, "smtp_user")})',
+        help="SMTP username (default: %s)" % get_default_opt_value(config, default_config, 'smtp_user'),
         default=get_default_opt_value(config, default_config, 'smtp_user')
     )

@@ -165,7 +161,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_smtp_password",
-        help=f'SMTP password (default: {get_default_opt_value(config, default_config, "smtp_password")})',
+        help="SMTP password (default: %s)" % get_default_opt_value(config, default_config, 'smtp_password'),
         default=get_default_opt_value(config, default_config, 'smtp_password')
     )

@@ -173,7 +169,7 @@ def add_email_opts(parser, config=None):
         '--smtp-debug',
         action="store_true",
         dest="email_smtp_debug",
-        help=f'Debug SMTP connection (default: {get_default_opt_value(config, default_config, "smtp_debug")})',
+        help="Debug SMTP connection (default: %s)" % get_default_opt_value(config, default_config, 'smtp_debug'),
         default=get_default_opt_value(config, default_config, 'smtp_debug')
     )

@@ -182,7 +178,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_encoding",
-        help=f'SMTP encoding (default: {get_default_opt_value(config, default_config, "email_encoding")})',
+        help="SMTP encoding (default: %s)" % get_default_opt_value(config, default_config, 'email_encoding'),
         default=get_default_opt_value(config, default_config, 'email_encoding')
     )

@@ -191,7 +187,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_sender_name",
-        help=f'Sender name (default: {get_default_opt_value(config, default_config, "sender_name")})',
+        help="Sender name (default: %s)" % get_default_opt_value(config, default_config, 'sender_name'),
         default=get_default_opt_value(config, default_config, 'sender_name')
     )

@@ -200,7 +196,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_sender_email",
-        help=f'Sender email (default: {get_default_opt_value(config, default_config, "sender_email")})',
+        help="Sender email (default: %s)" % get_default_opt_value(config, default_config, 'sender_email'),
         default=get_default_opt_value(config, default_config, 'sender_email')
     )

@@ -209,9 +205,7 @@ def add_email_opts(parser, config=None):
         action="store",
         type=str,
         dest="email_catch_all",
-        help=(
-            'Catch all sent email: specify catch recipient email address '
-            f'(default: {get_default_opt_value(config, default_config, "catch_all")})'),
+        help="Catch all sent email: specify catch recipient email address (default: %s)" % get_default_opt_value(config, default_config, 'catch_all'),
         default=get_default_opt_value(config, default_config, 'catch_all')
     )

@@ -29,49 +29,49 @@ def main(argv=None): # pylint: disable=too-many-locals,too-many-statements
     init_logging(options, 'Test LDAP helpers')

     now = datetime.datetime.now().replace(tzinfo=dateutil.tz.tzlocal())
-    print(f'Now = {now}')
+    print("Now = %s" % now)

     datestring_now = format_datetime(now)
-    print(f'format_datetime : {datestring_now}')
+    print("format_datetime : %s" % datestring_now)
-    print(f'format_datetime (from_timezone=utc) : {format_datetime(now.replace(tzinfo=None), from_timezone=pytz.utc)}')
+    print("format_datetime (from_timezone=utc) : %s" % format_datetime(now.replace(tzinfo=None), from_timezone=pytz.utc))
-    print(f'format_datetime (from_timezone=local) : {format_datetime(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal())}')
+    print("format_datetime (from_timezone=local) : %s" % format_datetime(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal()))
-    print(f'format_datetime (from_timezone=local) : {format_datetime(now.replace(tzinfo=None), from_timezone="local")}')
+    print("format_datetime (from_timezone='local') : %s" % format_datetime(now.replace(tzinfo=None), from_timezone='local'))
-    print(f'format_datetime (from_timezone=Paris) : {format_datetime(now.replace(tzinfo=None), from_timezone="Europe/Paris")}')
+    print("format_datetime (from_timezone=Paris) : %s" % format_datetime(now.replace(tzinfo=None), from_timezone='Europe/Paris'))
-    print(f'format_datetime (to_timezone=utc) : {format_datetime(now, to_timezone=pytz.utc)}')
+    print("format_datetime (to_timezone=utc) : %s" % format_datetime(now, to_timezone=pytz.utc))
-    print(f'format_datetime (to_timezone=local) : {format_datetime(now, to_timezone=dateutil.tz.tzlocal())}')
+    print("format_datetime (to_timezone=local) : %s" % format_datetime(now, to_timezone=dateutil.tz.tzlocal()))
-    print(f'format_datetime (to_timezone=local) : {format_datetime(now, to_timezone="local")}')
+    print("format_datetime (to_timezone='local') : %s" % format_datetime(now, to_timezone='local'))
-    print(f'format_datetime (to_timezone=Tokyo) : {format_datetime(now, to_timezone="Asia/Tokyo")}')
+    print("format_datetime (to_timezone=Tokyo) : %s" % format_datetime(now, to_timezone='Asia/Tokyo'))
-    print(f'format_datetime (naive=True) : {format_datetime(now, naive=True)}')
+    print("format_datetime (naive=True) : %s" % format_datetime(now, naive=True))

-    print(f'format_date : {format_date(now)}')
+    print("format_date : %s" % format_date(now))
-    print(f'format_date (from_timezone=utc) : {format_date(now.replace(tzinfo=None), from_timezone=pytz.utc)}')
+    print("format_date (from_timezone=utc) : %s" % format_date(now.replace(tzinfo=None), from_timezone=pytz.utc))
-    print(f'format_date (from_timezone=local) : {format_date(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal())}')
+    print("format_date (from_timezone=local) : %s" % format_date(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal()))
-    print(f'format_date (from_timezone=local) : {format_date(now.replace(tzinfo=None), from_timezone="local")}')
+    print("format_date (from_timezone='local') : %s" % format_date(now.replace(tzinfo=None), from_timezone='local'))
-    print(f'format_date (from_timezone=Paris) : {format_date(now.replace(tzinfo=None), from_timezone="Europe/Paris")}')
+    print("format_date (from_timezone=Paris) : %s" % format_date(now.replace(tzinfo=None), from_timezone='Europe/Paris'))
-    print(f'format_date (to_timezone=utc) : {format_date(now, to_timezone=pytz.utc)}')
+    print("format_date (to_timezone=utc) : %s" % format_date(now, to_timezone=pytz.utc))
-    print(f'format_date (to_timezone=local) : {format_date(now, to_timezone=dateutil.tz.tzlocal())}')
+    print("format_date (to_timezone=local) : %s" % format_date(now, to_timezone=dateutil.tz.tzlocal()))
-    print(f'format_date (to_timezone=local) : {format_date(now, to_timezone="local")}')
+    print("format_date (to_timezone='local') : %s" % format_date(now, to_timezone='local'))
-    print(f'format_date (to_timezone=Tokyo) : {format_date(now, to_timezone="Asia/Tokyo")}')
+    print("format_date (to_timezone=Tokyo) : %s" % format_date(now, to_timezone='Asia/Tokyo'))
-    print(f'format_date (naive=True) : {format_date(now, naive=True)}')
+    print("format_date (naive=True) : %s" % format_date(now, naive=True))

-    print(f'parse_datetime : {parse_datetime(datestring_now)}')
+    print("parse_datetime : %s" % parse_datetime(datestring_now))
-    print(f'parse_datetime (default_timezone=utc) : {parse_datetime(datestring_now[0:-1], default_timezone=pytz.utc)}')
+    print("parse_datetime (default_timezone=utc) : %s" % parse_datetime(datestring_now[0:-1], default_timezone=pytz.utc))
-    print(f'parse_datetime (default_timezone=local) : {parse_datetime(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal())}')
+    print("parse_datetime (default_timezone=local) : %s" % parse_datetime(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal()))
-    print(f'parse_datetime (default_timezone=local) : {parse_datetime(datestring_now[0:-1], default_timezone="local")}')
+    print("parse_datetime (default_timezone='local') : %s" % parse_datetime(datestring_now[0:-1], default_timezone='local'))
-    print(f'parse_datetime (default_timezone=Paris) : {parse_datetime(datestring_now[0:-1], default_timezone="Europe/Paris")}')
+    print("parse_datetime (default_timezone=Paris) : %s" % parse_datetime(datestring_now[0:-1], default_timezone='Europe/Paris'))
-    print(f'parse_datetime (to_timezone=utc) : {parse_datetime(datestring_now, to_timezone=pytz.utc)}')
+    print("parse_datetime (to_timezone=utc) : %s" % parse_datetime(datestring_now, to_timezone=pytz.utc))
-    print(f'parse_datetime (to_timezone=local) : {parse_datetime(datestring_now, to_timezone=dateutil.tz.tzlocal())}')
+    print("parse_datetime (to_timezone=local) : %s" % parse_datetime(datestring_now, to_timezone=dateutil.tz.tzlocal()))
-    print(f'parse_datetime (to_timezone=local) : {parse_datetime(datestring_now, to_timezone="local")}')
+    print("parse_datetime (to_timezone='local') : %s" % parse_datetime(datestring_now, to_timezone='local'))
-    print(f'parse_datetime (to_timezone=Tokyo) : {parse_datetime(datestring_now, to_timezone="Asia/Tokyo")}')
+    print("parse_datetime (to_timezone=Tokyo) : %s" % parse_datetime(datestring_now, to_timezone='Asia/Tokyo'))
-    print(f'parse_datetime (naive=True) : {parse_datetime(datestring_now, naive=True)}')
+    print("parse_datetime (naive=True) : %s" % parse_datetime(datestring_now, naive=True))

-    print(f'parse_date : {parse_date(datestring_now)}')
+    print("parse_date : %s" % parse_date(datestring_now))
-    print(f'parse_date (default_timezone=utc) : {parse_date(datestring_now[0:-1], default_timezone=pytz.utc)}')
+    print("parse_date (default_timezone=utc) : %s" % parse_date(datestring_now[0:-1], default_timezone=pytz.utc))
-    print(f'parse_date (default_timezone=local) : {parse_date(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal())}')
+    print("parse_date (default_timezone=local) : %s" % parse_date(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal()))
-    print(f'parse_date (default_timezone=local) : {parse_date(datestring_now[0:-1], default_timezone="local")}')
+    print("parse_date (default_timezone='local') : %s" % parse_date(datestring_now[0:-1], default_timezone='local'))
-    print(f'parse_date (default_timezone=Paris) : {parse_date(datestring_now[0:-1], default_timezone="Europe/Paris")}')
+    print("parse_date (default_timezone=Paris) : %s" % parse_date(datestring_now[0:-1], default_timezone='Europe/Paris'))
-    print(f'parse_date (to_timezone=utc) : {parse_date(datestring_now, to_timezone=pytz.utc)}')
+    print("parse_date (to_timezone=utc) : %s" % parse_date(datestring_now, to_timezone=pytz.utc))
-    print(f'parse_date (to_timezone=local) : {parse_date(datestring_now, to_timezone=dateutil.tz.tzlocal())}')
+    print("parse_date (to_timezone=local) : %s" % parse_date(datestring_now, to_timezone=dateutil.tz.tzlocal()))
-    print(f'parse_date (to_timezone=local) : {parse_date(datestring_now, to_timezone="local")}')
+    print("parse_date (to_timezone='local') : %s" % parse_date(datestring_now, to_timezone='local'))
-    print(f'parse_date (to_timezone=Tokyo) : {parse_date(datestring_now, to_timezone="Asia/Tokyo")}')
+    print("parse_date (to_timezone=Tokyo) : %s" % parse_date(datestring_now, to_timezone='Asia/Tokyo'))
-    print(f'parse_date (naive=True) : {parse_date(datestring_now, naive=True)}')
+    print("parse_date (naive=True) : %s" % parse_date(datestring_now, naive=True))

@@ -27,7 +27,7 @@ def main(argv=None): # pylint: disable=too-many-locals,too-many-statements
         action="store",
         type=int,
         dest="count",
-        help=f'Progress bar max value (default: {default_max_val})',
+        help="Progress bar max value (default: %s)" % default_max_val,
         default=default_max_val
     )

setup.py
@@ -11,7 +11,6 @@ extras_require = {
         'mocker',
         'pytest-mock',
         'pylint',
-        'flake8',
     ],
     'config': [
         'argcomplete',

tests.sh
@@ -21,20 +21,9 @@ fi
 echo "Install package with dev dependencies using pip..."
 $VENV/bin/python3 -m pip install -e ".[dev]" $QUIET_ARG

-RES=0
 # Run tests
 $VENV/bin/python3 -m pytest tests
-[ $? -ne 0 ] && RES=1
+RES=$?

-# Run pylint
-echo "Run pylint..."
-$VENV/bin/pylint --extension-pkg-whitelist=cx_Oracle mylib tests
-[ $? -ne 0 ] && RES=1
-
-# Run flake8
-echo "Run flake8..."
-$VENV/bin/flake8 mylib tests
-[ $? -ne 0 ] && RES=1
-
 # Clean temporary venv
 [ $TEMP_VENV -eq 1 ] && rm -fr $VENV

@@ -1,5 +1,4 @@
 # pylint: disable=redefined-outer-name,missing-function-docstring,protected-access,global-statement
-# pylint: disable=global-variable-not-assigned
 """ Tests on config lib """

 import logging

@@ -21,12 +21,10 @@ class FakeCXOracleCursor:
     def execute(self, sql, **params):
         assert self.opened
         if self.expected_exception:
-            raise cx_Oracle.Error(f'{self}.execute({sql}, {params}): expected exception')
+            raise cx_Oracle.Error("%s.execute(%s, %s): expected exception" % (self, sql, params))
         if self.expected_just_try and not sql.lower().startswith('select '):
-            assert False, f'{self}.execute({sql}, {params}) may not be executed in just try mode'
+            assert False, "%s.execute(%s, %s) may not be executed in just try mode" % (self, sql, params)
-        # pylint: disable=consider-using-f-string
         assert sql == self.expected_sql, "%s.execute(): Invalid SQL query:\n '%s'\nMay be:\n '%s'" % (self, sql, self.expected_sql)
-        # pylint: disable=consider-using-f-string
         assert params == self.expected_params, "%s.execute(): Invalid params:\n %s\nMay be:\n %s" % (self, params, self.expected_params)
         return self.expected_return

@@ -42,9 +40,9 @@ class FakeCXOracleCursor:
         self.opened = False

     def __repr__(self):
-        return (
-            f'FakeCXOracleCursor({self.expected_sql}, {self.expected_params}, '
-            f'{self.expected_return}, {self.expected_just_try})'
+        return "FakeCXOracleCursor(%s, %s, %s, %s)" % (
+            self.expected_sql, self.expected_params,
+            self.expected_return, self.expected_just_try
         )


@@ -61,8 +59,8 @@ class FakeCXOracle:
     def __init__(self, **kwargs):
         allowed_kwargs = dict(dsn=str, user=str, password=(str, None))
         for arg, value in kwargs.items():
-            assert arg in allowed_kwargs, f"Invalid arg {arg}='{value}'"
+            assert arg in allowed_kwargs, "Invalid arg %s='%s'" % (arg, value)
-            assert isinstance(value, allowed_kwargs[arg]), f"Arg {arg} not a {allowed_kwargs[arg]} ({type(value)})"
+            assert isinstance(value, allowed_kwargs[arg]), "Arg %s not a %s (%s)" % (arg, allowed_kwargs[arg], type(value))
             setattr(self, arg, value)

     def close(self):
@@ -129,9 +127,7 @@ def fake_connected_just_try_oracledb(fake_just_try_oracledb):

 def generate_mock_args(expected_args=(), expected_kwargs={}, expected_return=True): # pylint: disable=dangerous-default-value
     def mock_args(*args, **kwargs):
-        # pylint: disable=consider-using-f-string
         assert args == expected_args, "Invalid call args:\n %s\nMay be:\n %s" % (args, expected_args)
-        # pylint: disable=consider-using-f-string
         assert kwargs == expected_kwargs, "Invalid call kwargs:\n %s\nMay be:\n %s" % (kwargs, expected_kwargs)
         return expected_return
     return mock_args
@@ -143,9 +139,7 @@ def mock_doSQL_just_try(self, sql, params=None): # pylint: disable=unused-argum

 def generate_mock_doSQL(expected_sql, expected_params={}, expected_return=True): # pylint: disable=dangerous-default-value
     def mock_doSQL(self, sql, params=None): # pylint: disable=unused-argument
-        # pylint: disable=consider-using-f-string
         assert sql == expected_sql, "Invalid generated SQL query:\n '%s'\nMay be:\n '%s'" % (sql, expected_sql)
-        # pylint: disable=consider-using-f-string
         assert params == expected_params, "Invalid generated params:\n %s\nMay be:\n %s" % (params, expected_params)
         return expected_return
     return mock_doSQL

@@ -19,12 +19,10 @@ class FakePsycopg2Cursor:

     def execute(self, sql, params=None):
         if self.expected_exception:
-            raise psycopg2.Error(f'{self}.execute({sql}, {params}): expected exception')
+            raise psycopg2.Error("%s.execute(%s, %s): expected exception" % (self, sql, params))
         if self.expected_just_try and not sql.lower().startswith('select '):
-            assert False, f'{self}.execute({sql}, {params}) may not be executed in just try mode'
+            assert False, "%s.execute(%s, %s) may not be executed in just try mode" % (self, sql, params)
-        # pylint: disable=consider-using-f-string
         assert sql == self.expected_sql, "%s.execute(): Invalid SQL query:\n '%s'\nMay be:\n '%s'" % (self, sql, self.expected_sql)
-        # pylint: disable=consider-using-f-string
         assert params == self.expected_params, "%s.execute(): Invalid params:\n %s\nMay be:\n %s" % (self, params, self.expected_params)
         return self.expected_return

@@ -32,9 +30,9 @@ class FakePsycopg2Cursor:
         return self.expected_return

     def __repr__(self):
-        return (
-            f'FakePsycopg2Cursor({self.expected_sql}, {self.expected_params}, '
-            f'{self.expected_return}, {self.expected_just_try})'
+        return "FakePsycopg2Cursor(%s, %s, %s, %s)" % (
+            self.expected_sql, self.expected_params,
+            self.expected_return, self.expected_just_try
         )


@@ -51,9 +49,8 @@ class FakePsycopg2:
     def __init__(self, **kwargs):
         allowed_kwargs = dict(dbname=str, user=str, password=(str, None), host=str)
         for arg, value in kwargs.items():
-            assert arg in allowed_kwargs, f'Invalid arg {arg}="{value}"'
+            assert arg in allowed_kwargs, "Invalid arg %s='%s'" % (arg, value)
-            assert isinstance(value, allowed_kwargs[arg]), \
-                f'Arg {arg} not a {allowed_kwargs[arg]} ({type(value)})'
+            assert isinstance(value, allowed_kwargs[arg]), "Arg %s not a %s (%s)" % (arg, allowed_kwargs[arg], type(value))
             setattr(self, arg, value)

     def close(self):
@@ -63,7 +60,7 @@ class FakePsycopg2:
         self._check_just_try()
         assert len(arg) == 1 and isinstance(arg[0], str)
         if self.expected_exception:
-            raise psycopg2.Error(f'set_client_encoding({arg[0]}): Expected exception')
+            raise psycopg2.Error("set_client_encoding(%s): Expected exception" % arg[0])
         return self.expected_return

     def cursor(self):
@@ -127,9 +124,7 @@ def fake_connected_just_try_pgdb(fake_just_try_pgdb):

 def generate_mock_args(expected_args=(), expected_kwargs={}, expected_return=True): # pylint: disable=dangerous-default-value
     def mock_args(*args, **kwargs):
-        # pylint: disable=consider-using-f-string
         assert args == expected_args, "Invalid call args:\n %s\nMay be:\n %s" % (args, expected_args)
-        # pylint: disable=consider-using-f-string
         assert kwargs == expected_kwargs, "Invalid call kwargs:\n %s\nMay be:\n %s" % (kwargs, expected_kwargs)
         return expected_return
     return mock_args
@@ -141,9 +136,7 @@ def mock_doSQL_just_try(self, sql, params=None): # pylint: disable=unused-argum

 def generate_mock_doSQL(expected_sql, expected_params={}, expected_return=True): # pylint: disable=dangerous-default-value
     def mock_doSQL(self, sql, params=None): # pylint: disable=unused-argument
-        # pylint: disable=consider-using-f-string
         assert sql == expected_sql, "Invalid generated SQL query:\n '%s'\nMay be:\n '%s'" % (sql, expected_sql)
-        # pylint: disable=consider-using-f-string
         assert params == expected_params, "Invalid generated params:\n %s\nMay be:\n %s" % (params, expected_params)
         return expected_return
     return mock_doSQL