Compare commits
195 commits
Author | SHA1 | Date | |
---|---|---|---|
|
6ccea48b23 | ||
|
601857a2d4 | ||
|
4c51d0086f | ||
|
384e8a441d | ||
|
9e3faee819 | ||
|
296618a34e | ||
28103836ac | |||
3cf6a2682c | |||
eb87516e1a | |||
5dbdb0ffe6 | |||
b45819428d | |||
85caf81ac2 | |||
|
09c422efe2 | ||
|
e368521a96 | ||
|
25cdf9d4dc | ||
|
4962b16099 | ||
|
371d194728 | ||
|
dcaec24ea4 | ||
|
2736fc30ae | ||
|
73795d27b8 | ||
|
07ab4490d2 | ||
|
68c2103c58 | ||
|
0064fa979c | ||
|
b92a814577 | ||
|
8a0a65465d | ||
|
8e0e75f30e | ||
|
14d82fe796 | ||
|
698fd52a03 | ||
|
71a49f7b2f | ||
|
e38e5b10a7 | ||
|
3a443e1fa5 | ||
|
44bd9a6446 | ||
|
f8602801d7 | ||
|
e8572e2eaa | ||
|
f597164305 | ||
|
72877dd13e | ||
|
ebd73812bc | ||
|
5693cf8f8a | ||
|
63d6a6e0ed | ||
|
73735b378f | ||
|
c93b3508ed | ||
|
d75a61b4e8 | ||
|
93b06d6127 | ||
|
e71fb28295 | ||
|
b5df95a2dd | ||
|
5aa6a0cea4 | ||
|
3efaceb823 | ||
|
56f66dcd6e | ||
|
86e1d59b1d | ||
|
62c3fadf96 | ||
|
a83c3d635f | ||
|
69d6a596a8 | ||
|
3a43b8d07d | ||
|
56162452ac | ||
|
5fefc1ed84 | ||
|
e9477b1566 | ||
|
508a28e5c8 | ||
|
6a7368deb5 | ||
|
014a0802f8 | ||
|
135a742b6e | ||
da63f533be | |||
83ce6b9d1b | |||
b0fd2b1c6d | |||
815081d544 | |||
|
01d71fef89 | ||
|
ad04357c6b | ||
|
eb183b0d3b | ||
|
55782df47c | ||
7efacf04e4 | |||
a1118cc40a | |||
|
cb4b8d6974 | ||
|
c643fd30ac | ||
|
651e1a1a6c | ||
|
85d34b7b9a | ||
|
fd2911e810 | ||
|
cb9146f6e9 | ||
|
d0676443d7 | ||
|
68729f301f | ||
|
31eeff367c | ||
|
86eae5dae5 | ||
|
6adcc1eeed | ||
|
e858cb3d71 | ||
|
54d6c6e0f3 | ||
|
9511b31a79 | ||
|
b80cc3b3b6 | ||
|
f541630a63 | ||
|
2bc9964b12 | ||
|
fe3e3ed5f4 | ||
|
cbb97ae726 | ||
|
025fd12dc4 | ||
|
e8de509346 | ||
|
5a7a46355c | ||
|
a36ce4070b | ||
|
be80b1ed8c | ||
|
6ac1216ed8 | ||
|
5f0527f0c3 | ||
|
ade97bc90f | ||
|
e7e07a944a | ||
|
3bf87222fd | ||
|
2cdc7b870d | ||
|
2927065ed6 | ||
|
bf37e6223e | ||
|
03668e7003 | ||
|
f07ec8d0e9 | ||
|
36269c81c1 | ||
|
c02c271375 | ||
|
adcfe27ee1 | ||
|
1e9291af64 | ||
|
fd1317cce6 | ||
|
ed87cd09f0 | ||
|
689ef096a4 | ||
|
ce9eb1cdb0 | ||
|
64c8659000 | ||
|
f6a75f8cb6 | ||
|
063a3d637f | ||
|
af616a3e2b | ||
|
6267e0baf8 | ||
|
07ca0729fe | ||
|
a618f588ba | ||
|
c89ea4ab4c | ||
|
ac9a35bfe1 | ||
|
a0e7aae504 | ||
|
735ba92646 | ||
|
e3931e50da | ||
|
c4625e47ea | ||
|
bc276b9845 | ||
|
252f9a87ac | ||
|
bca3f4f347 | ||
|
0a0fae3e90 | ||
|
63600416b3 | ||
|
ef94c6969f | ||
|
9a89638e54 | ||
|
b90994037a | ||
|
c851e7aca7 | ||
|
5a30541917 | ||
|
fdf68a8ee2 | ||
|
ba4e85be50 | ||
|
0823353c0a | ||
|
558da5c815 | ||
|
26e7b8efd1 | ||
|
947ed765aa | ||
|
d1102ce1b3 | ||
|
4d4e8f133b | ||
|
31b994ad3c | ||
|
8e4864f2d5 | ||
|
d15c4f36fb | ||
|
6601891f9d | ||
|
2e829c4cc3 | ||
|
e058d315d4 | ||
|
8438f95e68 | ||
|
e6ce412db0 | ||
|
9c35ead28e | ||
|
ef510a36ee | ||
|
366260e92a | ||
|
afdb1430a6 | ||
|
b7a576eb52 | ||
|
5acbdbe8e3 | ||
|
4691ba7386 | ||
|
f8225bfbc9 | ||
|
6c7f3e4871 | ||
|
0eda55f11c | ||
|
48c1fb085e | ||
|
bfccaa0945 | ||
|
7b1019cb1b | ||
|
dec88f814e | ||
|
c77f9e5674 | ||
|
6b825813c1 | ||
|
86be381db9 | ||
|
94ed2e50fd | ||
|
f8d78f6fe2 | ||
|
73c19816f7 | ||
|
dc0f17dd20 | ||
|
439a1e9b33 | ||
|
9d611284ef | ||
|
afe54819bb | ||
|
4ff38aa898 | ||
|
21bd45ad25 | ||
|
6e778a8691 | ||
|
51dd4f5724 | ||
|
b27cae5b1e | ||
|
9134502ec4 | ||
|
b9ba720798 | ||
|
6e85515420 | ||
|
ca96b074d3 | ||
|
a325803130 | ||
|
1cd9432e25 | ||
|
5bdfc8479d | ||
|
b2d33e3ddf | ||
|
51a4d35823 | ||
|
45f088fa2b | ||
|
f44006535b | ||
|
6bbacce38a | ||
|
ad31ddc699 | ||
|
bc291ea21e | ||
|
bd4845f780 |
58 changed files with 9730 additions and 1730 deletions
86
.forgejo/workflows/release.yaml
Normal file
86
.forgejo/workflows/release.yaml
Normal file
|
@ -0,0 +1,86 @@
|
|||
---
|
||||
name: Build and publish Debian & Python packages
|
||||
on: ["create"]
|
||||
jobs:
|
||||
build:
|
||||
runs-on: docker
|
||||
container:
|
||||
image: docker.io/brenard/debian-python-deb:latest
|
||||
steps:
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Build Debian & Python package
|
||||
env:
|
||||
MAINTAINER_NAME: ${{ vars.MAINTAINER_NAME }}
|
||||
MAINTAINER_EMAIL: ${{ vars.MAINTAINER_EMAIL }}
|
||||
DEBIAN_CODENAME: ${{ vars.DEBIAN_CODENAME }}
|
||||
run: |
|
||||
echo "${{ secrets.GPG_KEY }}"|base64 -d|gpg --import
|
||||
./build.sh
|
||||
rm -fr deb_dist/mylib-*
|
||||
- name: Upload Debian & Python package files
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: dist
|
||||
path: |
|
||||
dist
|
||||
deb_dist
|
||||
|
||||
publish-forgejo:
|
||||
runs-on: docker
|
||||
container:
|
||||
image: docker.io/brenard/debian-python-deb:latest
|
||||
needs: build
|
||||
steps:
|
||||
- name: Download Debian & Python packages files
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: dist
|
||||
|
||||
- name: Create the release
|
||||
id: create-release
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir release
|
||||
mv dist/*.whl dist/*.tar.gz release/
|
||||
mv deb_dist/*.deb release/
|
||||
md5sum release/* > md5sum.txt
|
||||
sha512sum release/* > sha512sum.txt
|
||||
mv md5sum.txt sha512sum.txt release/
|
||||
{
|
||||
echo 'release_note<<EOF'
|
||||
cat dist/release_notes.md
|
||||
echo 'EOF'
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Publish release on Forgejo
|
||||
uses: actions/forgejo-release@v1
|
||||
with:
|
||||
direction: upload
|
||||
url: https://gitea.zionetrix.net
|
||||
token: ${{ secrets.forgejo_token }}
|
||||
release-dir: release
|
||||
release-notes: ${{ steps.create-release.outputs.release_note }}
|
||||
|
||||
publish-aptly:
|
||||
runs-on: docker
|
||||
container:
|
||||
image: docker.io/brenard/aptly-publish:latest
|
||||
needs: build
|
||||
steps:
|
||||
- name: "Download Debian package files"
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: dist
|
||||
|
||||
- name: "Publish Debian package on Aptly repository"
|
||||
uses: https://gitea.zionetrix.net/bn8/aptly-publish@master
|
||||
with:
|
||||
api_url: ${{ vars.apt_api_url }}
|
||||
api_username: ${{ vars.apt_api_username }}
|
||||
api_password: ${{ secrets.apt_api_password }}
|
||||
repo_name: ${{ vars.apt_repo_name }}
|
||||
path: "deb_dist"
|
||||
source_name: ${{ vars.apt_source_name }}
|
14
.forgejo/workflows/tests.yaml
Normal file
14
.forgejo/workflows/tests.yaml
Normal file
|
@ -0,0 +1,14 @@
|
|||
---
|
||||
name: Run tests
|
||||
on: [push]
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: docker
|
||||
container:
|
||||
image: docker.io/brenard/mylib:dev-master
|
||||
options: "--workdir /src"
|
||||
steps:
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v4
|
||||
- name: Run tests.sh
|
||||
run: ./tests.sh --no-venv
|
6
.gitignore
vendored
6
.gitignore
vendored
|
@ -1,2 +1,8 @@
|
|||
*.pyc
|
||||
*~
|
||||
mylib.egg-info
|
||||
venv*
|
||||
build
|
||||
dist
|
||||
deb_dist
|
||||
mylib-*.tar.gz
|
||||
|
|
71
.pre-commit-config.yaml
Normal file
71
.pre-commit-config.yaml
Normal file
|
@ -0,0 +1,71 @@
|
|||
# Pre-commit hooks to run tests and ensure code is cleaned.
|
||||
# See https://pre-commit.com for more information
|
||||
---
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.1.6
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: ["--fix"]
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.15.0
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: ["--keep-percent-format", "--py37-plus"]
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.11.0
|
||||
hooks:
|
||||
- id: black
|
||||
args: ["--target-version", "py37", "--line-length", "100"]
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ["--profile", "black", "--line-length", "100"]
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 6.1.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
args: ["--max-line-length=100"]
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.2.2
|
||||
hooks:
|
||||
- id: codespell
|
||||
args:
|
||||
- --ignore-words-list=exten
|
||||
- --skip="./.*,*.csv,*.json,*.ini,*.subject,*.txt,*.html,*.log,*.conf"
|
||||
- --quiet-level=2
|
||||
- --ignore-regex=.*codespell-ignore$
|
||||
# - --write-changes # Uncomment to write changes
|
||||
exclude_types: [csv, json]
|
||||
- repo: https://github.com/adrienverge/yamllint
|
||||
rev: v1.32.0
|
||||
hooks:
|
||||
- id: yamllint
|
||||
ignore: .github/
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v2.7.1
|
||||
hooks:
|
||||
- id: prettier
|
||||
args: ["--print-width", "100"]
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: ./.pre-commit-pylint --extension-pkg-whitelist=cx_Oracle
|
||||
language: system
|
||||
types: [python]
|
||||
require_serial: true
|
||||
- repo: https://github.com/PyCQA/bandit
|
||||
rev: 1.7.5
|
||||
hooks:
|
||||
- id: bandit
|
||||
args: [--skip, "B101", --recursive, "mylib"]
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pytest
|
||||
name: pytest
|
||||
entry: ./.pre-commit-pytest tests
|
||||
language: system
|
||||
types: [python]
|
||||
pass_filenames: false
|
21
.pre-commit-pylint
Executable file
21
.pre-commit-pylint
Executable file
|
@ -0,0 +1,21 @@
|
|||
#!/bin/bash
|
||||
|
||||
PWD=`pwd`
|
||||
|
||||
if [ -d "$PWD/venv" ]
|
||||
then
|
||||
echo "Run pylint inside venv ($PWD/venv)..."
|
||||
[ ! -e "$PWD/venv/bin/pylint" ] && $PWD/venv/bin/python -m pip install pylint
|
||||
$PWD/venv/bin/pylint "$@"
|
||||
exit $?
|
||||
elif [ -e "$PWD/pyproject.toml" ]
|
||||
then
|
||||
echo "Run pylint using poetry..."
|
||||
poetry run pylint --version > /dev/null 2>&1 || poetry run python -m pip install pylint
|
||||
poetry run pylint "$@"
|
||||
exit $?
|
||||
else
|
||||
echo "Run pylint at system scope..."
|
||||
pylint "$@"
|
||||
exit $?
|
||||
fi
|
21
.pre-commit-pytest
Executable file
21
.pre-commit-pytest
Executable file
|
@ -0,0 +1,21 @@
|
|||
#!/bin/bash
|
||||
|
||||
PWD=`pwd`
|
||||
|
||||
if [ -d "$PWD/venv" ]
|
||||
then
|
||||
echo "Run pytest inside venv ($PWD/venv)..."
|
||||
[ ! -e "$PWD/venv/bin/pytest" ] && $PWD/venv/bin/python -m pip install pytest
|
||||
$PWD/venv/bin/pytest "$@"
|
||||
exit $?
|
||||
elif [ -e "$PWD/pyproject.toml" ]
|
||||
then
|
||||
echo "Run pytest using poetry..."
|
||||
poetry run pytest --version > /dev/null 2>&1 || poetry run python -m pip install pytest
|
||||
poetry run pytest "$@"
|
||||
exit $?
|
||||
else
|
||||
echo "Run pytest at system scope..."
|
||||
pytest "$@"
|
||||
exit $?
|
||||
fi
|
17
.pylintrc
Normal file
17
.pylintrc
Normal file
|
@ -0,0 +1,17 @@
|
|||
[MESSAGES CONTROL]
|
||||
disable=invalid-name,
|
||||
locally-disabled,
|
||||
too-many-arguments,
|
||||
too-many-branches,
|
||||
too-many-locals,
|
||||
too-many-return-statements,
|
||||
too-many-nested-blocks,
|
||||
too-many-instance-attributes,
|
||||
too-many-lines,
|
||||
too-many-statements,
|
||||
logging-too-many-args,
|
||||
duplicate-code,
|
||||
|
||||
[FORMAT]
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
400
EmailClient.py
400
EmailClient.py
|
@ -1,400 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
""" Email client """
|
||||
|
||||
import logging
|
||||
import os
|
||||
import smtplib
|
||||
import email.utils
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.base import MIMEBase
|
||||
from email.encoders import encode_base64
|
||||
|
||||
from mako.template import Template as MakoTemplate
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EmailClient(object): # pylint: disable=useless-object-inheritance,too-many-instance-attributes
|
||||
"""
|
||||
Email client
|
||||
|
||||
This class abstract all interactions with the SMTP server.
|
||||
"""
|
||||
|
||||
smtp_host = None
|
||||
smtp_port = None
|
||||
smtp_ssl = None
|
||||
smtp_tls = None
|
||||
smtp_user = None
|
||||
smtp_password = None
|
||||
smtp_debug = None
|
||||
|
||||
sender_name = None
|
||||
sender_email = None
|
||||
|
||||
catch_all_addr = False
|
||||
just_try = False
|
||||
|
||||
encoding = 'utf-8'
|
||||
|
||||
templates = dict()
|
||||
|
||||
def __init__(self, smtp_host=None, smtp_port=None, smtp_ssl=None, smtp_tls=None, smtp_user=None, smtp_password=None, smtp_debug=None,
|
||||
sender_name=None, sender_email=None, catch_all_addr=None, just_try=None, encoding=None, templates=None):
|
||||
self.smtp_host = smtp_host if smtp_host else 'localhost'
|
||||
self.smtp_port = smtp_port if smtp_port else 25
|
||||
self.smtp_ssl = bool(smtp_ssl)
|
||||
self.smtp_tls = bool(smtp_tls)
|
||||
self.smtp_user = smtp_user if smtp_user else None
|
||||
self.smtp_password = smtp_password if smtp_password else None
|
||||
self.smtp_debug = bool(smtp_debug)
|
||||
|
||||
self.sender_name = sender_name if sender_name else "No reply"
|
||||
self.sender_email = sender_email if sender_email else "noreply@localhost"
|
||||
self.catch_all_addr = catch_all_addr if catch_all_addr else False
|
||||
self.just_try = just_try if just_try else False
|
||||
|
||||
assert templates is None or isinstance(templates, dict)
|
||||
self.templates = templates if templates else dict()
|
||||
|
||||
if encoding:
|
||||
self.encoding = encoding
|
||||
|
||||
def forge_message(self, rcpt_to, subject=None, html_body=None, text_body=None, attachment_files=None,
|
||||
attachment_payloads=None, sender_name=None, sender_email=None, encoding=None,
|
||||
template=None, **template_vars): # pylint: disable=too-many-arguments,too-many-locals
|
||||
"""
|
||||
Forge a message
|
||||
|
||||
:param rcpt_to: The recipient of the email. Could be a tuple(name, email) or just the email of the recipient.
|
||||
:param subject: The subject of the email.
|
||||
:param html_body: The HTML body of the email
|
||||
:param text_body: The plain text body of the email
|
||||
:param attachment_files: List of filepaths to attach
|
||||
:param attachment_payloads: List of tuples with filename and payload to attach
|
||||
:param sender_name: Custom sender name (default: as defined on initialization)
|
||||
:param sender_email: Custom sender email (default: as defined on initialization)
|
||||
:param encoding: Email content encoding (default: as defined on initialization)
|
||||
:param template: The name of a template to use to forge this email
|
||||
|
||||
All other parameters will be consider as template variables.
|
||||
"""
|
||||
msg = MIMEMultipart('alternative')
|
||||
msg['To'] = email.utils.formataddr(rcpt_to) if isinstance(rcpt_to, tuple) else rcpt_to
|
||||
msg['From'] = email.utils.formataddr((sender_name or self.sender_name, sender_email or self.sender_email))
|
||||
if subject:
|
||||
msg['Subject'] = subject.format(**template_vars)
|
||||
msg['Date'] = email.utils.formatdate(None, True)
|
||||
encoding = encoding if encoding else self.encoding
|
||||
if template:
|
||||
assert template in self.templates, "Unknwon template %s" % template
|
||||
# Handle subject from template
|
||||
if not subject:
|
||||
assert self.templates[template].get('subject'), 'No subject defined in template %s' % template
|
||||
msg['Subject'] = self.templates[template]['subject'].format(**template_vars)
|
||||
|
||||
# Put HTML part in last one to prefered it
|
||||
parts = []
|
||||
if self.templates[template].get('text'):
|
||||
if isinstance(self.templates[template]['text'], MakoTemplate):
|
||||
parts.append((self.templates[template]['text'].render(**template_vars), 'plain'))
|
||||
else:
|
||||
parts.append((self.templates[template]['text'].format(**template_vars), 'plain'))
|
||||
if self.templates[template].get('html'):
|
||||
if isinstance(self.templates[template]['html'], MakoTemplate):
|
||||
parts.append((self.templates[template]['html'].render(**template_vars), 'html'))
|
||||
else:
|
||||
parts.append((self.templates[template]['html'].format(**template_vars), 'html'))
|
||||
|
||||
for body, mime_type in parts:
|
||||
msg.attach(MIMEText(body.encode(encoding), mime_type, _charset=encoding))
|
||||
else:
|
||||
assert subject, 'No subject provided'
|
||||
if text_body:
|
||||
msg.attach(MIMEText(text_body.encode(encoding), 'plain', _charset=encoding))
|
||||
if html_body:
|
||||
msg.attach(MIMEText(html_body.encode(encoding), 'html', _charset=encoding))
|
||||
if attachment_files:
|
||||
for filepath in attachment_files:
|
||||
with open(filepath, 'rb') as fp:
|
||||
part = MIMEBase('application', "octet-stream")
|
||||
part.set_payload(fp.read())
|
||||
encode_base64(part)
|
||||
part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(filepath))
|
||||
msg.attach(part)
|
||||
if attachment_payloads:
|
||||
for filename, payload in attachment_payloads:
|
||||
part = MIMEBase('application', "octet-stream")
|
||||
part.set_payload(payload)
|
||||
encode_base64(part)
|
||||
part.add_header('Content-Disposition', 'attachment; filename="%s"' % filename)
|
||||
msg.attach(part)
|
||||
return msg
|
||||
|
||||
def send(self, rcpt_to, msg=None, subject=None, just_try=False, **forge_args):
|
||||
"""
|
||||
Send an email
|
||||
|
||||
:param rcpt_to: The recipient of the email. Could be a tuple(name, email)
|
||||
or just the email of the recipient.
|
||||
:param msg: The message of this email (as MIMEBase or derivated classes)
|
||||
:param subject: The subject of the email (only if the message is not provided
|
||||
using msg parameter)
|
||||
:param just_try: Enable just try mode (do not really send email, default: as defined on initialization)
|
||||
|
||||
All other parameters will be consider as parameters to forge the message
|
||||
(only if the message is not provided using msg parameter).
|
||||
"""
|
||||
msg = msg if msg else self.forge_message(rcpt_to, subject, **forge_args)
|
||||
|
||||
if just_try or self.just_try:
|
||||
log.debug('Just-try mode: do not really send this email to %s (subject="%s")', rcpt_to, subject or msg.get('subject', 'No subject'))
|
||||
return True
|
||||
|
||||
if self.catch_all_addr:
|
||||
catch_addr = self.catch_all_addr
|
||||
log.debug('Catch email originaly send to %s to %s', rcpt_to, catch_addr)
|
||||
rcpt_to = catch_addr
|
||||
|
||||
try:
|
||||
if self.smtp_ssl:
|
||||
logging.info("Establish SSL connection to server %s:%s", self.smtp_host, self.smtp_port)
|
||||
server = smtplib.SMTP_SSL(self.smtp_host, self.smtp_port)
|
||||
else:
|
||||
logging.info("Establish connection to server %s:%s", self.smtp_host, self.smtp_port)
|
||||
server = smtplib.SMTP(self.smtp_host, self.smtp_port)
|
||||
if self.smtp_tls:
|
||||
logging.info('Start TLS on SMTP connection')
|
||||
server.starttls()
|
||||
except smtplib.SMTPException:
|
||||
log.error('Error connecting to SMTP server %s:%s', self.smtp_host, self.smtp_port, exc_info=True)
|
||||
return False
|
||||
|
||||
if self.smtp_debug:
|
||||
server.set_debuglevel(True)
|
||||
|
||||
if self.smtp_user and self.smtp_password:
|
||||
try:
|
||||
log.info('Try to authenticate on SMTP connection as %s', self.smtp_user)
|
||||
server.login(self.smtp_user, self.smtp_password)
|
||||
except smtplib.SMTPException:
|
||||
log.error('Error authenticating on SMTP server %s:%s with user %s', self.smtp_host, self.smtp_port, self.smtp_user, exc_info=True)
|
||||
return False
|
||||
|
||||
error = False
|
||||
try:
|
||||
log.info('Sending email to %s', rcpt_to)
|
||||
server.sendmail(self.sender_email, [rcpt_to[1] if isinstance(rcpt_to, tuple) else rcpt_to], msg.as_string())
|
||||
except smtplib.SMTPException:
|
||||
error = True
|
||||
log.error('Error sending email to %s', rcpt_to, exc_info=True)
|
||||
finally:
|
||||
server.quit()
|
||||
|
||||
return not error
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Run tests
|
||||
import datetime
|
||||
import sys
|
||||
|
||||
import argparse
|
||||
|
||||
# Options parser
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbose',
|
||||
action="store_true",
|
||||
dest="verbose",
|
||||
help="Enable verbose mode"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'-d', '--debug',
|
||||
action="store_true",
|
||||
dest="debug",
|
||||
help="Enable debug mode"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'-l', '--log-file',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="logfile",
|
||||
help="Log file path"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'-j', '--just-try',
|
||||
action="store_true",
|
||||
dest="just_try",
|
||||
help="Enable just-try mode"
|
||||
)
|
||||
|
||||
email_opts = parser.add_argument_group('Email options')
|
||||
|
||||
email_opts.add_argument(
|
||||
'-H', '--smtp-host',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_smtp_host",
|
||||
help="SMTP host"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-P', '--smtp-port',
|
||||
action="store",
|
||||
type=int,
|
||||
dest="email_smtp_port",
|
||||
help="SMTP port"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-S', '--smtp-ssl',
|
||||
action="store_true",
|
||||
dest="email_smtp_ssl",
|
||||
help="Use SSL"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-T', '--smtp-tls',
|
||||
action="store_true",
|
||||
dest="email_smtp_tls",
|
||||
help="Use TLS"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-u', '--smtp-user',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_smtp_user",
|
||||
help="SMTP username"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-p', '--smtp-password',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_smtp_password",
|
||||
help="SMTP password"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-D', '--smtp-debug',
|
||||
action="store_true",
|
||||
dest="email_smtp_debug",
|
||||
help="Debug SMTP connection"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-e', '--email-encoding',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_encoding",
|
||||
help="SMTP encoding"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-f', '--sender-name',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_sender_name",
|
||||
help="Sender name"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-F', '--sender-email',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_sender_email",
|
||||
help="Sender email"
|
||||
)
|
||||
|
||||
email_opts.add_argument(
|
||||
'-C', '--catch-all',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="email_catch_all",
|
||||
help="Catch all sent email: specify catch recipient email address"
|
||||
)
|
||||
|
||||
test_opts = parser.add_argument_group('Test email options')
|
||||
|
||||
test_opts.add_argument(
|
||||
'-t', '--to',
|
||||
action="store",
|
||||
type=str,
|
||||
dest="test_to",
|
||||
help="Test email recipient",
|
||||
)
|
||||
|
||||
test_opts.add_argument(
|
||||
'-m', '--mako',
|
||||
action="store_true",
|
||||
dest="test_mako",
|
||||
help="Test mako templating",
|
||||
)
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
if not options.test_to:
|
||||
parser.error('You must specify test email recipient using -t/--to parameter')
|
||||
sys.exit(1)
|
||||
|
||||
# Initialize logs
|
||||
logformat = '%(asctime)s - Test EmailClient - %(levelname)s - %(message)s'
|
||||
if options.debug:
|
||||
loglevel = logging.DEBUG
|
||||
elif options.verbose:
|
||||
loglevel = logging.INFO
|
||||
else:
|
||||
loglevel = logging.WARNING
|
||||
|
||||
if options.logfile:
|
||||
logging.basicConfig(filename=options.logfile, level=loglevel, format=logformat)
|
||||
else:
|
||||
logging.basicConfig(level=loglevel, format=logformat)
|
||||
|
||||
if options.email_smtp_user and not options.email_smtp_password:
|
||||
import getpass
|
||||
options.email_smtp_password = getpass.getpass('Please enter SMTP password: ')
|
||||
|
||||
logging.info('Initialize Email client')
|
||||
email_client = EmailClient(
|
||||
smtp_host=options.email_smtp_host,
|
||||
smtp_port=options.email_smtp_port,
|
||||
smtp_ssl=options.email_smtp_ssl,
|
||||
smtp_tls=options.email_smtp_tls,
|
||||
smtp_user=options.email_smtp_user,
|
||||
smtp_password=options.email_smtp_password,
|
||||
smtp_debug=options.email_smtp_debug,
|
||||
sender_name=options.email_sender_name,
|
||||
sender_email=options.email_sender_email,
|
||||
catch_all_addr=options.email_catch_all,
|
||||
just_try=options.just_try,
|
||||
encoding=options.email_encoding,
|
||||
templates=dict(
|
||||
test=dict(
|
||||
subject="Test email",
|
||||
text=(
|
||||
"Just a test email sent at {sent_date}." if not options.test_mako else
|
||||
MakoTemplate("Just a test email sent at ${sent_date}.")
|
||||
),
|
||||
html=(
|
||||
"<strong>Just a test email.</strong> <small>(sent at {sent_date})</small>" if not options.test_mako else
|
||||
MakoTemplate("<strong>Just a test email.</strong> <small>(sent at ${sent_date})</small>")
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
logging.info('Send a test email to %s', options.test_to)
|
||||
if email_client.send(options.test_to, template='test', sent_date=datetime.datetime.now()):
|
||||
logging.info('Test email sent')
|
||||
sys.exit(0)
|
||||
logging.error('Fail to send test email')
|
||||
sys.exit(1)
|
174
HashMap.py
174
HashMap.py
|
@ -1,174 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# My hash mapping library
|
||||
#
|
||||
# Mapping configuration
|
||||
# {
|
||||
# '[dst key 1]': { # Key name in the result
|
||||
#
|
||||
# 'order': [int], # Processing order between destinations keys
|
||||
#
|
||||
# # Source values
|
||||
# 'other_key': [key], # Other key of the destination to use as source of values
|
||||
# 'key' : '[src key]', # Key of source hash to get source values
|
||||
# 'keys' : ['[sk1]', '[sk2]', ...], # List of source hash's keys to get source values
|
||||
#
|
||||
# # Clean / convert values
|
||||
# 'cleanRegex': '[regex]', # Regex that be use to remove unwanted characters. Ex : [^0-9+]
|
||||
# 'convert': [function], # Function to use to convert value : Original value will be passed
|
||||
# # as argument and the value retrieve will replace source value in
|
||||
# # the result
|
||||
# # Ex :
|
||||
# # lambda x: x.strip()
|
||||
# # lambda x: "myformat : %s" % x
|
||||
# # Deduplicate / check values
|
||||
# 'deduplicate': [bool], # If True, sources values will be depluplicated
|
||||
# 'check': [function], # Function to use to check source value : Source value will be passed
|
||||
# # as argument and if function return True, the value will be preserved
|
||||
# # Ex :
|
||||
# # lambda x: x in my_global_hash
|
||||
# # Join values
|
||||
# 'join': '[glue]', # If present, sources values will be join using the "glue"
|
||||
#
|
||||
# # Alternative mapping
|
||||
# 'or': { [map configuration] } # If this mapping case does not retreive any value, try to get value(s)
|
||||
# # with this other mapping configuration
|
||||
# },
|
||||
# '[dst key 2]': {
|
||||
# [...]
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# Return format :
|
||||
# {
|
||||
# '[dst key 1]': ['v1','v2', ...],
|
||||
# '[dst key 2]': [ ... ],
|
||||
# [...]
|
||||
# }
|
||||
|
||||
import logging, re
|
||||
|
||||
def clean_value(value):
|
||||
if isinstance(value, int):
|
||||
value=str(value)
|
||||
return value.encode('utf8')
|
||||
|
||||
def map(map_keys,src,dst={}):
|
||||
|
||||
def get_values(dst_key,src,m):
|
||||
# Extract sources values
|
||||
values=[]
|
||||
if 'other_key' in m:
|
||||
if m['other_key'] in dst:
|
||||
values=dst[m['other_key']]
|
||||
if 'key' in m:
|
||||
if m['key'] in src and src[m['key']]!='':
|
||||
values.append(clean_value(src[m['key']]))
|
||||
|
||||
if 'keys' in m:
|
||||
for key in m['keys']:
|
||||
if key in src and src[key]!='':
|
||||
values.append(clean_value(src[key]))
|
||||
|
||||
# Clean and convert values
|
||||
if 'cleanRegex' in m and len(values)>0:
|
||||
new_values=[]
|
||||
for v in values:
|
||||
nv=re.sub(m['cleanRegex'],'',v)
|
||||
if nv!='':
|
||||
new_values.append(nv)
|
||||
values=new_values
|
||||
|
||||
if 'convert' in m and len(values)>0:
|
||||
new_values=[]
|
||||
for v in values:
|
||||
nv=m['convert'](v)
|
||||
if nv!='':
|
||||
new_values.append(nv)
|
||||
values=new_values
|
||||
|
||||
# Deduplicate values
|
||||
if m.get('deduplicate') and len(values)>1:
|
||||
new_values=[]
|
||||
for v in values:
|
||||
if v not in new_values:
|
||||
new_values.append(v)
|
||||
values=new_values
|
||||
|
||||
# Check values
|
||||
if 'check' in m and len(values)>0:
|
||||
new_values=[]
|
||||
for v in values:
|
||||
if m['check'](v):
|
||||
new_values.append(v)
|
||||
else:
|
||||
logging.debug('Invalid value %s for key %s' % (v,dst_key))
|
||||
if dst_key not in invalid_values:
|
||||
invalid_values[dst_key]=[]
|
||||
if v not in invalid_values[dst_key]:
|
||||
invalid_values[dst_key].append(v)
|
||||
values=new_values
|
||||
|
||||
# Join values
|
||||
if 'join' in m and len(values)>1:
|
||||
values=[m['join'].join(values)]
|
||||
|
||||
# Manage alternative mapping case
|
||||
if len(values)==0 and 'or' in m:
|
||||
values=get_values(dst_key,src,m['or'])
|
||||
|
||||
|
||||
return values
|
||||
|
||||
for dst_key in sorted(map_keys.keys(), key=lambda x: map_keys[x]['order']):
|
||||
values=get_values(dst_key,src,map_keys[dst_key])
|
||||
|
||||
if len(values)==0:
|
||||
if 'required' in map_keys[dst_key] and map_keys[dst_key]['required']:
|
||||
logging.debug('Destination key %s could not be filled from source but is required' % dst_key)
|
||||
return False
|
||||
continue
|
||||
|
||||
dst[dst_key]=values
|
||||
return dst
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
src={
|
||||
'uid': 'hmartin',
|
||||
'firstname': 'Martin',
|
||||
'lastname': 'Martin',
|
||||
'disp_name': 'Henri Martin',
|
||||
'line_1': '3 rue de Paris',
|
||||
'line_2': 'Pour Pierre',
|
||||
'zip_text': '92 120',
|
||||
'city_text': 'Montrouge',
|
||||
'line_city': '92120 Montrouge',
|
||||
'tel1': '01 00 00 00 00',
|
||||
'tel2': '09 00 00 00 00',
|
||||
'mobile': '06 00 00 00 00',
|
||||
'fax': '01 00 00 00 00',
|
||||
'email': 'H.MARTIN@GMAIL.COM',
|
||||
}
|
||||
|
||||
map_c={
|
||||
'uid': {'order': 0, 'key': 'uid','required': True},
|
||||
'givenName': {'order': 1, 'key': 'firstname'},
|
||||
'sn': {'order': 2, 'key': 'lastname'},
|
||||
'cn': {'order': 3, 'key': 'disp_name','required': True, 'or': {'attrs': ['firstname','lastname'],'join': ' '}},
|
||||
'displayName': {'order': 4, 'other_key': 'displayName'},
|
||||
'street': {'order': 5, 'join': ' / ', 'keys': ['ligne_1','ligne_2']},
|
||||
'postalCode': {'order': 6, 'key': 'zip_text', 'cleanRegex': '[^0-9]'},
|
||||
'l': {'order': 7, 'key': 'city_text'},
|
||||
'postalAddress': {'order': 8, 'join': '$', 'keys': ['ligne_1','ligne_2','ligne_city']},
|
||||
'telephoneNumber': {'order': 9, 'keys': ['tel1','tel2'], 'cleanRegex': '[^0-9+]', 'deduplicate': True},
|
||||
'mobile': {'order': 10,'key': 'mobile'},
|
||||
'facsimileTelephoneNumber': {'order': 11,'key': 'fax'},
|
||||
'mail': {'order': 12,'key': 'email', 'convert': lambda x: x.lower().strip()}
|
||||
}
|
||||
|
||||
logging.debug('[TEST] Map src=%s / config= %s' % (src,map_c))
|
||||
logging.debug('[TEST] Result : %s' % map(map_c,src))
|
456
LdapServer.py
456
LdapServer.py
|
@ -1,456 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import copy
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
import dateutil.parser
|
||||
import dateutil.tz
|
||||
import ldap
|
||||
from ldap.controls import SimplePagedResultsControl
|
||||
from ldap.controls.simple import RelaxRulesControl
|
||||
import ldap.modlist as modlist
|
||||
import pytz
|
||||
|
||||
class LdapServer(object): # pylint: disable=useless-object-inheritance
    """
    Helper to request an LDAP server: connect/bind, search (plain or paged)
    and add/update/rename/delete objects.

    Errors are logged, or raised as LdapServerException when raiseOnError
    is enabled.
    """

    uri = None
    dn = None
    pwd = None
    v2 = None

    # LDAP connection object (0 until connect() succeeds)
    con = 0

    def __init__(self, uri, dn=None, pwd=None, v2=None, raiseOnError=False, logger=False):
        """
        :param uri: LDAP server URI (e.g. ldap://, ldaps:// or ldapi://)
        :param dn: bind DN (optional; on ldapi:// without DN, SASL EXTERNAL is used)
        :param pwd: bind password (optional)
        :param v2: use LDAP protocol version 2 instead of 3 (optional)
        :param raiseOnError: raise LdapServerException instead of logging errors (optional)
        :param logger: custom logger to use (optional, default: root logger)
        """
        self.uri = uri
        self.dn = dn
        self.pwd = pwd
        self.raiseOnError = raiseOnError
        if v2:
            self.v2 = True
        if logger:
            self.logger = logger
        else:
            self.logger = logging.getLogger()

    def _error(self, error, level=logging.WARNING):
        """Handle an error message: raise if raiseOnError is set, log it otherwise."""
        if self.raiseOnError:
            raise LdapServerException(error)
        self.logger.log(level, error)

    def connect(self):
        """
        Connect and bind to the LDAP server (no-op if already connected).

        :return: True on success, False otherwise
        """
        if self.con == 0:
            try:
                con = ldap.initialize(self.uri)
                if self.v2:
                    con.protocol_version = ldap.VERSION2 # pylint: disable=no-member
                else:
                    con.protocol_version = ldap.VERSION3 # pylint: disable=no-member

                if self.dn:
                    con.simple_bind_s(self.dn, self.pwd)
                elif self.uri.startswith('ldapi://'):
                    # Local UNIX-socket connection: authenticate via SASL EXTERNAL
                    con.sasl_interactive_bind_s("", ldap.sasl.external())

                self.con = con
                return True
            except ldap.LDAPError as e: # pylint: disable=no-member
                self._error('LdapServer - Error connecting and binding to LDAP server : %s' % e, logging.CRITICAL)
                return False
        return True

    @staticmethod
    def get_scope(scope):
        """Map a scope keyword ('base', 'one' or 'sub') to the ldap module constant."""
        if scope == 'base':
            return ldap.SCOPE_BASE # pylint: disable=no-member
        if scope == 'one':
            return ldap.SCOPE_ONELEVEL # pylint: disable=no-member
        if scope == 'sub':
            return ldap.SCOPE_SUBTREE # pylint: disable=no-member
        raise Exception("Unknown LDAP scope '%s'" % scope)

    def search(self, basedn, filterstr=None, attrs=None, sizelimit=0, scope=None):
        """
        Run a synchronous LDAP search.

        :param basedn: search base DN
        :param filterstr: LDAP filter (optional, default: '(objectClass=*)')
        :param attrs: list of attributes to retrieve (optional, default: all)
        :param sizelimit: maximum number of objects to retrieve (optional, 0 = no limit)
        :param scope: search scope keyword (optional, default: 'sub')
        :return: dict mapping each object DN to its attributes dict
        """
        res_id = self.con.search(
            basedn,
            self.get_scope(scope if scope else 'sub'),
            filterstr if filterstr else '(objectClass=*)',
            attrs if attrs else []
        )
        ret = {}
        c = 0
        while True:
            res_type, res_data = self.con.result(res_id, 0)
            # Fix: stop as soon as sizelimit objects have been stored
            # (the previous "c > sizelimit" test returned one extra object).
            if res_data == [] or (sizelimit and c >= sizelimit):
                break
            if res_type == ldap.RES_SEARCH_ENTRY: # pylint: disable=no-member
                ret[res_data[0][0]] = res_data[0][1]
                c += 1
        return ret

    def get_object(self, dn, filterstr=None, attrs=None):
        """Retrieve one object by DN. Return its attributes dict, or None if absent."""
        result = self.search(dn, filterstr=filterstr, scope='base', attrs=attrs)
        return result[dn] if dn in result else None

    def paged_search(self, basedn, filterstr, attrs, scope='sub', pagesize=500):
        """
        Run a paged LDAP search (RFC 2696).

        :param basedn: search base DN
        :param filterstr: LDAP filter
        :param attrs: list of attributes to retrieve
        :param scope: search scope keyword (optional, default: 'sub')
        :param pagesize: number of objects per page (optional, default: 500)
        :return: dict mapping each object DN to its attributes dict, or False on error
        """
        assert not self.v2, "Paged search is not available on LDAP version 2"
        # Initialize SimplePagedResultsControl object
        page_control = SimplePagedResultsControl(
            True,
            size=pagesize,
            cookie='' # Start without cookie
        )
        ret = {}
        pages_count = 0
        self.logger.debug(
            "LdapServer - Paged search with base DN '%s', filter '%s', scope '%s', pagesize=%d and attrs=%s",
            basedn,
            filterstr,
            scope,
            pagesize,
            attrs
        )
        while True:
            pages_count += 1
            self.logger.debug(
                "LdapServer - Paged search: request page %d with a maximum of %d objects (current total count: %d)",
                pages_count,
                pagesize,
                len(ret)
            )
            try:
                res_id = self.con.search_ext(
                    basedn,
                    self.get_scope(scope),
                    filterstr,
                    attrs,
                    serverctrls=[page_control]
                )
            except ldap.LDAPError as e: # pylint: disable=no-member
                self._error('LdapServer - Error running paged search on LDAP server: %s' % e, logging.CRITICAL)
                return False
            try:
                rtype, rdata, rmsgid, rctrls = self.con.result3(res_id) # pylint: disable=unused-variable
            except ldap.LDAPError as e: # pylint: disable=no-member
                self._error('LdapServer - Error pulling paged search result from LDAP server: %s' % e, logging.CRITICAL)
                return False

            # Detect and catch PagedResultsControl answer from rctrls
            result_page_control = None
            if rctrls:
                for rctrl in rctrls:
                    if rctrl.controlType == SimplePagedResultsControl.controlType:
                        result_page_control = rctrl
                        break

            # If PagedResultsControl answer not detected, paged search can not work
            if not result_page_control:
                self._error('LdapServer - Server ignores RFC2696 control, paged search can not works', logging.CRITICAL)
                return False

            # Store results of this page
            for obj_dn, obj_attrs in rdata:
                ret[obj_dn] = obj_attrs

            # If no cookie returned, we are done
            if not result_page_control.cookie:
                break

            # Otherwise, set cookie for the next search
            page_control.cookie = result_page_control.cookie

        self.logger.debug("LdapServer - Paged search end: %d object(s) retreived in %d page(s) of %d object(s)", len(ret), pages_count, pagesize)
        return ret

    def add_object(self, dn, attrs):
        """
        Add an object to the LDAP directory.

        :param dn: the new object's DN
        :param attrs: dict of the new object's attributes
        :return: True on success, False otherwise
        """
        ldif = modlist.addModlist(attrs)
        try:
            self.logger.debug("LdapServer - Add %s", dn)
            self.con.add_s(dn, ldif)
            return True
        except ldap.LDAPError as e: # pylint: disable=no-member
            self._error("LdapServer - Error adding %s : %s" % (dn, e), logging.ERROR)

        return False

    def update_object(self, dn, old, new, ignore_attrs=None, relax=False):
        """
        Update an LDAP object from its old and new attributes dicts.

        :param dn: the object's DN
        :param old: dict of the object's current attributes
        :param new: dict of the object's wanted attributes
        :param ignore_attrs: list of attribute names to ignore (optional)
        :param relax: use the Relax Rules control (optional, LDAPv3 only)
        :return: True on success (or nothing to do), False otherwise
        """
        assert not relax or not self.v2, "Relax modification is not available on LDAP version 2"
        ldif = modlist.modifyModlist(
            old, new,
            ignore_attr_types=ignore_attrs if ignore_attrs else []
        )
        if ldif == []:
            # Nothing to change
            return True
        try:
            if relax:
                self.con.modify_ext_s(dn, ldif, serverctrls=[RelaxRulesControl()])
            else:
                self.con.modify_s(dn, ldif)
            return True
        except ldap.LDAPError as e: # pylint: disable=no-member
            self._error("LdapServer - Error updating %s : %s\nOld : %s\nNew : %s" % (dn, e, old, new), logging.ERROR)
            return False

    @staticmethod
    def update_need(old, new, ignore_attrs=None):
        """Return True if an update is needed to go from attributes old to new."""
        ldif = modlist.modifyModlist(
            old, new,
            ignore_attr_types=ignore_attrs if ignore_attrs else []
        )
        if ldif == []:
            return False
        return True

    @staticmethod
    def get_changes(old, new, ignore_attrs=None):
        """Return the LDAP modlist needed to go from attributes old to new."""
        return modlist.modifyModlist(
            old, new,
            ignore_attr_types=ignore_attrs if ignore_attrs else []
        )

    @staticmethod
    def format_changes(old, new, ignore_attrs=None, prefix=None):
        """
        Format the changes between attributes old and new as a human-readable,
        multi-line string (one "[prefix] - OP attr[: value]" line per change).
        """
        msg = []
        for (op, attr, val) in modlist.modifyModlist(old, new, ignore_attr_types=ignore_attrs if ignore_attrs else []):
            if op == ldap.MOD_ADD: # pylint: disable=no-member
                op = 'ADD'
            elif op == ldap.MOD_DELETE: # pylint: disable=no-member
                op = 'DELETE'
            elif op == ldap.MOD_REPLACE: # pylint: disable=no-member
                op = 'REPLACE'
            else:
                op = 'UNKNOWN (=%s)' % op
            if val is None and op == 'DELETE':
                msg.append('%s - %s %s' % (prefix if prefix else '', op, attr))
            else:
                # Fix: format prefix as in the branch above (was printing "None"
                # when no prefix was provided).
                msg.append('%s - %s %s: %s' % (prefix if prefix else '', op, attr, val))
        return '\n'.join(msg)

    def rename_object(self, dn, new_rdn, new_sup=None, delete_old=True):
        """
        Rename/move an LDAP object.

        :param dn: the object's current DN
        :param new_rdn: the new RDN, or a complete new DN (then new_sup must be None)
        :param new_sup: the new superior DN (optional, default: unchanged)
        :param delete_old: delete the old RDN attribute value (optional, default: True)
        :return: True on success, False otherwise
        """
        # If new_rdn is a complete DN, split new RDN and new superior DN
        if len(new_rdn.split(',')) > 1:
            self.logger.debug(
                "LdapServer - Rename with a full new DN detected (%s): split new RDN and new superior DN",
                new_rdn
            )
            assert new_sup is None, "You can't provide a complete DN as new_rdn and also provide new_sup parameter"
            new_dn_parts = new_rdn.split(',')
            new_sup = ','.join(new_dn_parts[1:])
            new_rdn = new_dn_parts[0]
        try:
            self.logger.debug(
                "LdapServer - Rename %s in %s (new superior: %s, delete old: %s)",
                dn,
                new_rdn,
                "same" if new_sup is None else new_sup,
                delete_old
            )
            self.con.rename_s(dn, new_rdn, newsuperior=new_sup, delold=delete_old)
            return True
        except ldap.LDAPError as e: # pylint: disable=no-member
            self._error(
                "LdapServer - Error renaming %s in %s (new superior: %s, delete old: %s): %s" % (
                    dn,
                    new_rdn,
                    "same" if new_sup is None else new_sup,
                    delete_old,
                    e
                ),
                logging.ERROR
            )

        return False

    def drop_object(self, dn):
        """
        Delete an object from the LDAP directory.

        :param dn: the object's DN
        :return: True on success, False otherwise
        """
        try:
            self.logger.debug("LdapServer - Delete %s", dn)
            self.con.delete_s(dn)
            return True
        except ldap.LDAPError as e: # pylint: disable=no-member
            self._error("LdapServer - Error deleting %s : %s" % (dn, e), logging.ERROR)

        return False

    @staticmethod
    def get_dn(obj):
        """Return the DN of a raw search result entry ([(dn, attrs)])."""
        return obj[0][0]

    @staticmethod
    def get_attr(obj, attr, all=None, default=None): # pylint: disable=redefined-builtin
        """
        Return the value(s) of an attribute from an object's attributes dict.
        The attribute name lookup is case-insensitive.

        :param obj: the object's attributes dict
        :param attr: the attribute name
        :param all: if set, return the list of all values (default: first value only)
        :param default: value returned when the attribute is absent
        """
        if attr not in obj:
            # Fall back to a case-insensitive lookup of the attribute name
            for k in obj:
                if k.lower() == attr.lower():
                    attr = k
                    break
        if all is not None:
            if attr in obj:
                return obj[attr]
            return default or []
        if attr in obj:
            return obj[attr][0]
        return default
|
||||
|
||||
class LdapServerException(BaseException):
    """Exception raised by LdapServer when raiseOnError is enabled."""

    # NOTE(review): deliberately kept on BaseException to preserve the
    # original class hierarchy (callers may catch it explicitly).
    def __init__(self, msg):
        super().__init__(msg)
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
def parse_datetime(value, to_timezone=None, default_timezone=None, naive=None):
    """
    Convert LDAP date string to datetime.datetime object

    :param value: The LDAP date string to convert
    :param to_timezone: If specified, the return datetime will be converted to this
                        specific timezone (optional, default : timezone of the LDAP date string)
    :param default_timezone: The timezone used if LDAP date string does not specified
                             the timezone (optional, default : UTC)
    :param naive: Use naive datetime : return naive datetime object (without timezone conversion from LDAP)
    """
    assert to_timezone is None or isinstance(to_timezone, (datetime.tzinfo, str)), 'to_timezone must be None, a datetime.tzinfo object or a string (not %s)' % type(to_timezone)
    assert default_timezone is None or isinstance(default_timezone, (datetime.tzinfo, pytz.tzinfo.DstTzInfo, str)), 'default_timezone parameter must be None, a string, a pytz.tzinfo.DstTzInfo or a datetime.tzinfo object (not %s)' % type(default_timezone)
    date = dateutil.parser.parse(value, dayfirst=False)
    if not date.tzinfo:
        # The LDAP string carried no timezone information
        if naive:
            return date
        # Resolve the default_timezone parameter to a timezone object
        if not default_timezone:
            default_timezone = pytz.utc
        elif default_timezone == 'local':
            default_timezone = dateutil.tz.tzlocal()
        elif isinstance(default_timezone, str):
            default_timezone = pytz.timezone(default_timezone)
        # pytz zones must be attached with localize() (DST-aware); plain
        # datetime.tzinfo objects are simply set with replace()
        if isinstance(default_timezone, pytz.tzinfo.DstTzInfo):
            date = default_timezone.localize(date)
        elif isinstance(default_timezone, datetime.tzinfo):
            date = date.replace(tzinfo=default_timezone)
        else:
            # Unreachable: the assertion above restricts the accepted types
            raise Exception("It's not supposed to happen!")
    elif naive:
        # Timezone present but naive result requested: drop tzinfo without converting
        return date.replace(tzinfo=None)
    if to_timezone:
        # Resolve to_timezone and convert the (now timezone-aware) datetime
        if to_timezone == 'local':
            to_timezone = dateutil.tz.tzlocal()
        elif isinstance(to_timezone, str):
            to_timezone = pytz.timezone(to_timezone)
        return date.astimezone(to_timezone)
    return date
|
||||
|
||||
def parse_date(value, to_timezone=None, default_timezone=None, naive=None):
    """
    Convert LDAP date string to datetime.date object

    :param value: The LDAP date string to convert
    :param to_timezone: If specified, the return datetime will be converted to this
                        specific timezone (optional, default : timezone of the LDAP date string)
    :param default_timezone: The timezone used if LDAP date string does not specified
                             the timezone (optional, default : server local timezone)
    :param naive: Use naive datetime : do not handle timezone conversion from LDAP
    """
    parsed = parse_datetime(
        value,
        to_timezone=to_timezone,
        default_timezone=default_timezone,
        naive=naive,
    )
    return parsed.date()
|
||||
|
||||
def format_datetime(value, from_timezone=None, to_timezone=None, naive=None):
    """
    Convert datetime.datetime object to LDAP date string

    :param value: The datetime.datetime object to convert
    :param from_timezone: The timezone used if datetime.datetime object is naive (no tzinfo)
                          (optional, default : server local timezone)
    :param to_timezone: The timezone used in LDAP (optional, default : UTC)
    :param naive: Use naive datetime : datetime store as UTC in LDAP (without conversion)
    """
    assert isinstance(value, datetime.datetime), 'First parameter must be an datetime.datetime object (not %s)' % type(value)
    assert from_timezone is None or isinstance(from_timezone, (datetime.tzinfo, pytz.tzinfo.DstTzInfo, str)), 'from_timezone parameter must be None, a string, a pytz.tzinfo.DstTzInfo or a datetime.tzinfo object (not %s)' % type(from_timezone)
    assert to_timezone is None or isinstance(to_timezone, (datetime.tzinfo, str)), 'to_timezone must be None, a datetime.tzinfo object or a string (not %s)' % type(to_timezone)
    if not value.tzinfo and not naive:
        # Naive input: attach from_timezone (default: server local timezone)
        if not from_timezone or from_timezone == 'local':
            from_timezone = dateutil.tz.tzlocal()
        elif isinstance(from_timezone, str):
            from_timezone = pytz.timezone(from_timezone)
        # pytz zones must be attached with localize() (DST-aware); plain
        # datetime.tzinfo objects are simply set with replace()
        if isinstance(from_timezone, pytz.tzinfo.DstTzInfo):
            from_value = from_timezone.localize(value)
        elif isinstance(from_timezone, datetime.tzinfo):
            from_value = value.replace(tzinfo=from_timezone)
        else:
            # Unreachable: the assertion above restricts the accepted types
            raise Exception("It's not supposed to happen!")
    elif naive:
        # Naive mode: label the value as UTC without any conversion
        from_value = value.replace(tzinfo=pytz.utc)
    else:
        from_value = copy.deepcopy(value)
    # Resolve the target timezone used in LDAP (default: UTC)
    if not to_timezone:
        to_timezone = pytz.utc
    elif to_timezone == 'local':
        to_timezone = dateutil.tz.tzlocal()
    elif isinstance(to_timezone, str):
        to_timezone = pytz.timezone(to_timezone)
    to_value = from_value.astimezone(to_timezone) if not naive else from_value
    datestring = to_value.strftime('%Y%m%d%H%M%S%z')
    # LDAP generalized time uses 'Z' as shorthand for the '+0000' (UTC) offset
    if datestring.endswith('+0000'):
        datestring = datestring.replace('+0000', 'Z')
    return datestring
|
||||
|
||||
def format_date(value, from_timezone=None, to_timezone=None, naive=None):
    """
    Convert datetime.date object to LDAP date string

    :param value: The datetime.date object to convert
    :param from_timezone: The timezone used if datetime.datetime object is naive (no tzinfo)
                          (optional, default : server local timezone)
    :param to_timezone: The timezone used in LDAP (optional, default : UTC)
    :param naive: Use naive datetime : do not handle timezone conversion before formating
                  and return datetime as UTC (because LDAP required a timezone)
    """
    assert isinstance(value, datetime.date), 'First parameter must be an datetime.date object (not %s)' % type(value)
    # Promote the date to a datetime at midnight, then format it
    midnight = datetime.datetime.min.time()
    as_datetime = datetime.datetime.combine(value, midnight)
    return format_datetime(as_datetime, from_timezone, to_timezone, naive)
|
||||
|
||||
#
|
||||
# Tests
|
||||
#
|
||||
if __name__ == '__main__':
    # Smoke tests: exercise every format_*/parse_* helper with each supported
    # timezone parameter form (tzinfo object, 'local' keyword, zone name string,
    # naive mode) and print the results for manual inspection.
    now = datetime.datetime.now().replace(tzinfo=dateutil.tz.tzlocal())
    print("Now = %s" % now)

    datestring_now = format_datetime(now)
    print("format_datetime : %s" % datestring_now)
    print("format_datetime (from_timezone=utc) : %s" % format_datetime(now.replace(tzinfo=None), from_timezone=pytz.utc))
    print("format_datetime (from_timezone=local) : %s" % format_datetime(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal()))
    print("format_datetime (from_timezone='local') : %s" % format_datetime(now.replace(tzinfo=None), from_timezone='local'))
    print("format_datetime (from_timezone=Paris) : %s" % format_datetime(now.replace(tzinfo=None), from_timezone='Europe/Paris'))
    print("format_datetime (to_timezone=utc) : %s" % format_datetime(now, to_timezone=pytz.utc))
    print("format_datetime (to_timezone=local) : %s" % format_datetime(now, to_timezone=dateutil.tz.tzlocal()))
    print("format_datetime (to_timezone='local') : %s" % format_datetime(now, to_timezone='local'))
    print("format_datetime (to_timezone=Tokyo) : %s" % format_datetime(now, to_timezone='Asia/Tokyo'))
    print("format_datetime (naive=True) : %s" % format_datetime(now, naive=True))

    print("format_date : %s" % format_date(now))
    print("format_date (from_timezone=utc) : %s" % format_date(now.replace(tzinfo=None), from_timezone=pytz.utc))
    print("format_date (from_timezone=local) : %s" % format_date(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal()))
    print("format_date (from_timezone='local') : %s" % format_date(now.replace(tzinfo=None), from_timezone='local'))
    print("format_date (from_timezone=Paris) : %s" % format_date(now.replace(tzinfo=None), from_timezone='Europe/Paris'))
    print("format_date (to_timezone=utc) : %s" % format_date(now, to_timezone=pytz.utc))
    print("format_date (to_timezone=local) : %s" % format_date(now, to_timezone=dateutil.tz.tzlocal()))
    print("format_date (to_timezone='local') : %s" % format_date(now, to_timezone='local'))
    print("format_date (to_timezone=Tokyo) : %s" % format_date(now, to_timezone='Asia/Tokyo'))
    print("format_date (naive=True) : %s" % format_date(now, naive=True))

    # Parsing round-trip: datestring_now[0:-1] strips the trailing 'Z' so the
    # string is timezone-less and default_timezone applies.
    print("parse_datetime : %s" % parse_datetime(datestring_now))
    print("parse_datetime (default_timezone=utc) : %s" % parse_datetime(datestring_now[0:-1], default_timezone=pytz.utc))
    print("parse_datetime (default_timezone=local) : %s" % parse_datetime(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal()))
    print("parse_datetime (default_timezone='local') : %s" % parse_datetime(datestring_now[0:-1], default_timezone='local'))
    print("parse_datetime (default_timezone=Paris) : %s" % parse_datetime(datestring_now[0:-1], default_timezone='Europe/Paris'))
    print("parse_datetime (to_timezone=utc) : %s" % parse_datetime(datestring_now, to_timezone=pytz.utc))
    print("parse_datetime (to_timezone=local) : %s" % parse_datetime(datestring_now, to_timezone=dateutil.tz.tzlocal()))
    print("parse_datetime (to_timezone='local') : %s" % parse_datetime(datestring_now, to_timezone='local'))
    print("parse_datetime (to_timezone=Tokyo) : %s" % parse_datetime(datestring_now, to_timezone='Asia/Tokyo'))
    print("parse_datetime (naive=True) : %s" % parse_datetime(datestring_now, naive=True))

    print("parse_date : %s" % parse_date(datestring_now))
    print("parse_date (default_timezone=utc) : %s" % parse_date(datestring_now[0:-1], default_timezone=pytz.utc))
    print("parse_date (default_timezone=local) : %s" % parse_date(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal()))
    print("parse_date (default_timezone='local') : %s" % parse_date(datestring_now[0:-1], default_timezone='local'))
    print("parse_date (default_timezone=Paris) : %s" % parse_date(datestring_now[0:-1], default_timezone='Europe/Paris'))
    print("parse_date (to_timezone=utc) : %s" % parse_date(datestring_now, to_timezone=pytz.utc))
    print("parse_date (to_timezone=local) : %s" % parse_date(datestring_now, to_timezone=dateutil.tz.tzlocal()))
    print("parse_date (to_timezone='local') : %s" % parse_date(datestring_now, to_timezone='local'))
    print("parse_date (to_timezone=Tokyo) : %s" % parse_date(datestring_now, to_timezone='Asia/Tokyo'))
    print("parse_date (naive=True) : %s" % parse_date(datestring_now, naive=True))
|
59
MyDB.py
59
MyDB.py
|
@ -1,59 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import MySQLdb
|
||||
import logging
|
||||
import sys
|
||||
|
||||
class MyDB(object):
    """
    Small MySQL/MariaDB wrapper around MySQLdb: connect once, then run
    write queries with doSQL() and read queries with doSelect().
    """

    host = ""
    user = ""
    pwd = ""
    db = ""

    # MySQL connection object (0 until connect() succeeds)
    con = 0

    def __init__(self, host, user, pwd, db):
        """
        :param host: MySQL server host
        :param user: login user
        :param pwd: login password
        :param db: database name
        """
        self.host = host
        self.user = user
        self.pwd = pwd
        self.db = db

    def connect(self):
        """Connect to the MySQL server (no-op if already connected, exits on failure)."""
        if self.con == 0:
            try:
                con = MySQLdb.connect(self.host, self.user, self.pwd, self.db)
                self.con = con
            except Exception as e:
                # Fix: "except Exception, e" is Python 2-only syntax
                logging.fatal(e)
                sys.exit(1)

    def doSQL(self, sql):
        """
        Run a write SQL query (INSERT/UPDATE/DELETE) and commit it.

        :param sql: the SQL query
        :return: True on success, False otherwise (rolled back)
        """
        cursor = self.con.cursor()
        try:
            cursor.execute(sql)
            self.con.commit()
            return True
        except Exception as e:
            logging.error('Erreur durant la requete sql %s : %s' % (sql, e))
            self.con.rollback()
            return False

    def doSelect(self, sql):
        """
        Run a read SQL query (SELECT) and return its result.

        :param sql: the SQL query
        :return: tuple of result rows on success, False otherwise
        """
        cursor = self.con.cursor()
        try:
            cursor.execute(sql)
            # Fix: removed dead code that was unreachable after this return
            return cursor.fetchall()
        except Exception as e:
            logging.error('Erreur durant la requete sql %s : %s' % (sql, e))
            return False
|
123
Pbar.py
123
Pbar.py
|
@ -1,123 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
# coding: utf8
|
||||
|
||||
""" Progress bar """
|
||||
|
||||
import logging
|
||||
import progressbar
|
||||
|
||||
class Pbar(object): # pylint: disable=useless-object-inheritance
    """
    Progress bar

    Abstracts a progress bar that can be enabled/disabled by
    configuration/script parameters: when disabled (or when maxval is
    falsy), the name is logged once and the other methods do nothing.
    """

    __pbar = None
    __count = None

    def __init__(self, name, maxval, enabled=True):
        """
        :param name: label displayed in front of the bar
        :param maxval: maximum value of the bar
        :param enabled: display the bar (otherwise just log the name)
        """
        if not enabled or not maxval:
            logging.info(name)
            return
        self.__count = 0
        widgets = [
            name + ': ',
            progressbar.Percentage(),
            ' ',
            progressbar.Bar(),
            ' ',
            progressbar.SimpleProgress(),
            progressbar.ETA(),
        ]
        self.__pbar = progressbar.ProgressBar(widgets=widgets, maxval=maxval).start()

    def increment(self, step=None):
        """
        Increment the progress bar

        :param step: The step (optional, default: 1)
        """
        if self.__pbar:
            self.__count += step or 1
            self.__pbar.update(self.__count)

    def finish(self):
        """ Finish the progress bar """
        if self.__pbar:
            self.__pbar.finish()
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run tests: manual test harness driving a Pbar from the command line.
    import time
    import argparse

    # Default maximum value of the test progress bar
    default_max_val = 10

    # Options parser
    parser = argparse.ArgumentParser()

    parser.add_argument(
        '-v', '--verbose',
        action="store_true",
        dest="verbose",
        help="Enable verbose mode"
    )

    parser.add_argument(
        '-d', '--debug',
        action="store_true",
        dest="debug",
        help="Enable debug mode"
    )

    parser.add_argument(
        '-l', '--log-file',
        action="store",
        type=str,
        dest="logfile",
        help="Log file path"
    )

    parser.add_argument(
        '-p', '--progress',
        action="store_true",
        dest="progress",
        help="Enable progress bar"
    )

    parser.add_argument(
        '-C', '--count',
        action="store",
        type=int,
        dest="count",
        help="Progress bar max value (default: %s)" % default_max_val,
        default=default_max_val
    )

    options = parser.parse_args()

    # Initialize logs: level from -d/-v flags, optional output file from -l
    logformat = '%(asctime)s - Test Pbar - %(levelname)s - %(message)s'
    if options.debug:
        loglevel = logging.DEBUG
    elif options.verbose:
        loglevel = logging.INFO
    else:
        loglevel = logging.WARNING

    if options.logfile:
        logging.basicConfig(filename=options.logfile, level=loglevel, format=logformat)
    else:
        logging.basicConfig(level=loglevel, format=logformat)

    # Drive the bar one step at a time (sleep makes the progress visible)
    pbar = Pbar('Test', options.count, enabled=options.progress)

    for idx in range(0, options.count): # pylint: disable=unused-variable
        pbar.increment()
        time.sleep(0.3)
    pbar.finish()
|
191
PgDB.py
191
PgDB.py
|
@ -1,191 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import psycopg2
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
import datetime
|
||||
|
||||
class PgDB(object):
    """
    Small PostgreSQL wrapper around psycopg2: connect once, then run write
    queries with doSQL() and read queries with doSelect(), or use the
    insert/update/delete/select helpers.

    WARNING: the insert/update/delete/select helpers build raw SQL strings
    by escaping values; prefer prepared queries (doSQL/doSelect with params).
    """

    host = ""
    user = ""
    pwd = ""
    db = ""

    # PostgreSQL connection object (0 until connect() succeeds)
    con = 0

    # Formats used to serialize date / datetime values in SQL
    date_format = '%Y-%m-%d'
    datetime_format = '%Y-%m-%d %H:%M:%S'

    def __init__(self, host, user, pwd, db):
        """
        :param host: PostgreSQL server host
        :param user: login user
        :param pwd: login password
        :param db: database name
        """
        self.host = host
        self.user = user
        self.pwd = pwd
        self.db = db

    def connect(self):
        """Connect to the PostgreSQL server (no-op if connected, exits on failure)."""
        if self.con == 0:
            try:
                con = psycopg2.connect(
                    dbname=self.db,
                    user=self.user,
                    host=self.host,
                    password=self.pwd
                )
                self.con = con
            except Exception:
                logging.fatal(
                    'An error occured during Postgresql database connection (%s@%s, database=%s).',
                    self.user, self.host, self.db, exc_info=1
                )
                sys.exit(1)
        return True

    def close(self):
        """Close the connection (if opened)."""
        if self.con:
            self.con.close()

    def setEncoding(self, enc):
        """Set the connection client encoding. Return True on success."""
        if self.con:
            try:
                self.con.set_client_encoding(enc)
                return True
            except Exception:
                logging.error('An error occured setting Postgresql database connection encoding to "%s"', enc, exc_info=1)
        return False

    def doSQL(self, sql, params=None):
        """
        Run a write SQL query (INSERT/UPDATE/DELETE) and commit it.

        :param sql: the SQL query
        :param params: query parameters (optional)
        :return: True on success, False otherwise (rolled back)
        """
        cursor = self.con.cursor()
        try:
            if params is None:
                cursor.execute(sql)
            else:
                cursor.execute(sql, params)
            self.con.commit()
            return True
        except Exception:
            # Fix: sql is already a str under Python 3, no decode() needed
            logging.error('Error during SQL request "%s"', sql, exc_info=1)
            self.con.rollback()
            return False

    def doSelect(self, sql, params=None):
        """
        Run a read SQL query (SELECT) and return its result.

        :param sql: the SQL query
        :param params: query parameters (optional; fix: now defaults to None
                       so select() can call this method without params)
        :return: list of result rows on success, False otherwise
        """
        cursor = self.con.cursor()
        try:
            if params is None:
                cursor.execute(sql)
            else:
                cursor.execute(sql, params)
            return cursor.fetchall()
        except Exception:
            logging.error('Error during SQL request "%s"', sql, exc_info=1)
            return False

    #
    # SQL helpers
    #
    def _quote_value(self, value):
        """Serialize and quote/escape a value for inclusion in a raw SQL string."""
        if isinstance(value, (int, float)):
            # Fix: unicode() does not exist under Python 3
            return str(value)

        # NOTE: the datetime check must come before the date one
        # (datetime.datetime is a subclass of datetime.date)
        if isinstance(value, datetime.datetime):
            value = self._format_datetime(value)
        elif isinstance(value, datetime.date):
            value = self._format_date(value)

        return "'%s'" % str(value).replace("'", "''")

    def _format_where_clauses(self, where_clauses, where_op='AND'):
        """
        Format where clauses given as a raw string, a list of raw clauses,
        or a dict of field -> value (joined with where_op). Return the
        resulting SQL fragment, or False on unsupported input type.
        """
        if isinstance(where_clauses, str):
            return where_clauses
        if isinstance(where_clauses, list):
            return (" %s " % where_op).join(where_clauses)
        if isinstance(where_clauses, dict):
            return (" %s " % where_op).join(
                "%s=%s" % (field, self._quote_value(value))
                for field, value in where_clauses.items()
            )
        logging.error('Unsupported where clauses type %s', type(where_clauses))
        return False

    def _format_datetime(self, value):
        """Format a datetime.datetime as an SQL string."""
        return value.strftime(self.datetime_format)

    def _format_date(self, value):
        """Format a datetime.date as an SQL string."""
        return value.strftime(self.date_format)

    def time2datetime(self, time):
        """Convert a UNIX timestamp to an SQL datetime string."""
        # Fix: fromtimestamp lives on the datetime.datetime class, not the module
        return self._format_datetime(datetime.datetime.fromtimestamp(int(time)))

    def time2date(self, time):
        """Convert a UNIX timestamp to an SQL date string."""
        return self._format_date(datetime.datetime.fromtimestamp(int(time)))

    def insert(self, table, values, just_try=False):
        """Insert a row (dict field -> value). Return True on success."""
        sql = "INSERT INTO %s (%s) VALUES (%s)" % (
            table,
            ', '.join(values.keys()),
            ", ".join(self._quote_value(values[field]) for field in values)
        )

        if just_try:
            logging.debug("Just-try mode : execute INSERT query : %s", sql)
            return True

        logging.debug(sql)
        if not self.doSQL(sql):
            logging.error("Fail to execute INSERT query (SQL : %s)" % sql)
            return False
        return True

    def update(self, table, values, where_clauses, where_op='AND', just_try=False):
        """Update rows matching where_clauses with values. Return True on success."""
        where = self._format_where_clauses(where_clauses, where_op=where_op)
        if not where:
            return False

        sql = "UPDATE %s SET %s WHERE %s" % (
            table,
            ", ".join("%s=%s" % (field, self._quote_value(values[field])) for field in values),
            where
        )

        if just_try:
            logging.debug("Just-try mode : execute UPDATE query : %s", sql)
            return True

        logging.debug(sql)
        if not self.doSQL(sql):
            logging.error("Fail to execute UPDATE query (SQL : %s)", sql)
            return False
        return True

    def delete(self, table, where_clauses, where_op='AND', just_try=False):
        """Delete rows matching where_clauses. Return True on success."""
        where = self._format_where_clauses(where_clauses, where_op=where_op)
        if not where:
            return False

        sql = "DELETE FROM %s WHERE %s" % (table, where)

        if just_try:
            logging.debug("Just-try mode : execute DELETE query : %s", sql)
            return True

        logging.debug(sql)
        if not self.doSQL(sql):
            logging.error("Fail to execute DELETE query (SQL : %s)", sql)
            return False
        return True

    def select(self, table, where_clauses=None, fields=None, where_op='AND', order_by=None, just_try=False):
        """
        Select rows from a table.

        :param table: table name
        :param where_clauses: see _format_where_clauses() (optional)
        :param fields: field name(s) to retrieve (optional, default: all)
        :param where_op: operator joining dict/list where clauses (optional, default: AND)
        :param order_by: ORDER BY expression (optional)
        :return: list of result rows, or False on error
        """
        sql = "SELECT "
        if fields is None:
            sql += "*"
        elif isinstance(fields, str):
            sql += fields
        else:
            sql += ", ".join(fields)

        sql += " FROM " + table
        if where_clauses:
            where = self._format_where_clauses(where_clauses, where_op=where_op)
            if not where:
                return False

            sql += " WHERE " + where

        if order_by:
            # Fix: was 'sql += u"ORDER %s"' — missing leading space and BY keyword
            sql += " ORDER BY %s" % order_by

        return self.doSelect(sql)
|
96
README.md
Normal file
96
README.md
Normal file
|
@ -0,0 +1,96 @@
|
|||
# Python MyLib
|
||||
|
||||
Just a set of helpers small libs to make common tasks easier in my script development.
|
||||
|
||||
[![status-badge](https://ci.zionetrix.net/api/badges/bn8/python-mylib/status.svg)](https://ci.zionetrix.net/bn8/python-mylib)
|
||||
|
||||
## Requirements
|
||||
|
||||
```
|
||||
apt install \
|
||||
build-essential \
|
||||
python3 \
|
||||
python3-dev
|
||||
|
||||
# For LDAP:
|
||||
apt install libldap2-dev libsasl2-dev
|
||||
|
||||
# For Config:
|
||||
apt install pkg-config libsystemd-dev
|
||||
|
||||
# For PgSQL:
|
||||
apt install libpq-dev
|
||||
|
||||
# For MySQL:
|
||||
apt install libmariadb-dev
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
### Using pip
|
||||
|
||||
Just run `pip install git+https://gitea.zionetrix.net/bn8/python-mylib.git`
|
||||
|
||||
### From source
|
||||
|
||||
Just run `python setup.py install`
|
||||
|
||||
**Note:** This project could previously use as independent python files (not as module). This old version is keep in _legacy_ git branch (not maintained).
|
||||
|
||||
## Include libs
|
||||
|
||||
- **mylib.email.EmailClient:** An email client to forge (eventually using template) and send email via a SMTP server
|
||||
- **mylib.ldap.LdapServer:** A small lib to make requesting LDAP server easier. It's also provide some helper functions to deal with LDAP date string.
|
||||
- **mylib.mysql.MyDB:** An extra small lib to remember me how to interact with MySQL/MariaDB database
|
||||
- **mylib.pgsql.PgDB:** An small lib to remember me how to interact with PostgreSQL database. **Warning:** The insert/update/delete/select methods demonstrate how to forge raw SQL request, but **it's a bad idea**: Prefer using prepared query.
|
||||
- **mylib.opening_hours:** A set of helper functions to deal with french opening hours (including normal opening hours, exceptional closure and nonworking public holidays).
|
||||
- **mylib.pbar.Pbar:** A small lib for progress bar
|
||||
- **mylib.report.Report:** A small lib to implement logging based email report send at exit
|
||||
|
||||
To know how to use these libs, you can take a look on _mylib.scripts_ content or in _tests_ directory.
|
||||
|
||||
## Code Style
|
||||
|
||||
[pylint](https://pypi.org/project/pylint/) is used to check for errors and enforces a coding standard, using those parameters:
|
||||
|
||||
```bash
|
||||
pylint --extension-pkg-whitelist=cx_Oracle
|
||||
```
|
||||
|
||||
[flake8](https://pypi.org/project/flake8/) is also used to check for errors and enforces a coding standard, using those parameters:
|
||||
|
||||
```bash
|
||||
flake8 --max-line-length=100
|
||||
```
|
||||
|
||||
[black](https://pypi.org/project/black/) is used to format the code, using those parameters:
|
||||
|
||||
```bash
|
||||
black --target-version py37 --line-length 100
|
||||
```
|
||||
|
||||
[isort](https://pypi.org/project/isort/) is used to format the imports, using those parameter:
|
||||
|
||||
```bash
|
||||
isort --profile black --line-length 100
|
||||
```
|
||||
|
||||
[pyupgrade](https://pypi.org/project/pyupgrade/) is used to automatically upgrade syntax, using those parameters:
|
||||
|
||||
```bash
|
||||
pyupgrade --keep-percent-format --py37-plus
|
||||
```
|
||||
|
||||
**Note:** There is `.pre-commit-config.yaml` to use [pre-commit](https://pre-commit.com/) to automatically run these tools before commits. After cloning the repository, execute `pre-commit install` to install the git hook.
|
||||
|
||||
## Copyright
|
||||
|
||||
Copyright (c) 2013-2021 Benjamin Renard <brenard@zionetrix.net>
|
||||
|
||||
## License
|
||||
|
||||
This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 3 as published by the Free Software Foundation.
|
||||
|
||||
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
67
Report.py
67
Report.py
|
@ -1,67 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
# coding: utf8
|
||||
|
||||
""" Report """
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
|
||||
|
||||
class Report(object): # pylint: disable=useless-object-inheritance
|
||||
""" Logging report """
|
||||
|
||||
content = []
|
||||
handler = None
|
||||
formatter = None
|
||||
subject = None
|
||||
rcpt_to = None
|
||||
email_client = None
|
||||
|
||||
def __init__(self, loglevel=logging.WARNING, logformat='%(asctime)s - %(levelname)s - %(message)s',
|
||||
subject=None, rcpt_to=None, email_client=None):
|
||||
self.handler = logging.StreamHandler(self)
|
||||
self.handler.setLevel(loglevel)
|
||||
self.formatter = logging.Formatter(logformat)
|
||||
self.handler.setFormatter(self.formatter)
|
||||
|
||||
self.subject = subject
|
||||
self.rcpt_to = rcpt_to
|
||||
self.email_client = email_client
|
||||
|
||||
def get_handler(self):
|
||||
""" Retreive logging handler """
|
||||
return self.handler
|
||||
|
||||
def write(self, msg):
|
||||
""" Write a message """
|
||||
self.content.append(msg)
|
||||
|
||||
def get_content(self):
|
||||
""" Read the report content """
|
||||
return "".join(self.content)
|
||||
|
||||
def send(self, subject=None, rcpt_to=None, email_client=None, just_try=False):
|
||||
""" Send report using an EmailClient """
|
||||
if not self.rcpt_to and not rcpt_to:
|
||||
logging.debug('No report recipient, do not send report')
|
||||
return True
|
||||
assert self.subject or subject, "You must provide report subject using Report.__init__ or Report.send"
|
||||
assert self.email_client or email_client, "You must provide email client using Report.__init__ or Report.send"
|
||||
content = self.get_content()
|
||||
if not content:
|
||||
logging.debug('Report is empty, do not send it')
|
||||
return True
|
||||
msg = email_client.forge_message(
|
||||
self.rcpt_to or rcpt_to,
|
||||
subject=self.subject or subject,
|
||||
text_body=content
|
||||
)
|
||||
if email_client.send(self.rcpt_to or rcpt_to, msg=msg, just_try=just_try):
|
||||
logging.debug('Report sent to %s', self.rcpt_to or rcpt_to)
|
||||
return True
|
||||
logging.error('Fail to send report to %s', self.rcpt_to or rcpt_to)
|
||||
return False
|
||||
|
||||
def send_at_exit(self, **kwargs):
|
||||
""" Send report at exit """
|
||||
atexit.register(self.send, **kwargs)
|
136
build.sh
Executable file
136
build.sh
Executable file
|
@ -0,0 +1,136 @@
|
|||
#!/bin/bash
|
||||
|
||||
QUIET_ARG=""
|
||||
[ "$1" == "--quiet" ] && QUIET_ARG="--quiet"
|
||||
|
||||
# Enter source directory
|
||||
cd $( dirname $0 )
|
||||
|
||||
echo "Clean previous build..."
|
||||
rm -fr dist deb_dist
|
||||
|
||||
if [ -n "$CI" -a $UID -eq 0 ]
|
||||
then
|
||||
echo "CI environment detected, set current directory as git safe for root"
|
||||
git config --global --add safe.directory $(pwd)
|
||||
fi
|
||||
|
||||
echo "Detect version using git describe..."
|
||||
VERSION="$( git describe --tags|sed 's/^[^0-9]*//' )"
|
||||
|
||||
echo "Computing python version..."
|
||||
if [ $( echo "$VERSION"|grep -c "-" ) -gt 0 ]
|
||||
then
|
||||
echo "Development version detected ($VERSION), compute custom python dev version"
|
||||
PY_VERSION="$( echo "$VERSION"|sed 's/-\([0-9]\)\+-.*$/.dev\1/' )"
|
||||
else
|
||||
echo "Clean tagged version detected, use it"
|
||||
PY_VERSION="$VERSION"
|
||||
fi
|
||||
|
||||
echo "Set version=$PY_VERSION in setup.py using sed..."
|
||||
sed -i "s/^version *=.*$/version = '$PY_VERSION'/" setup.py
|
||||
|
||||
if [ -d venv ]
|
||||
then
|
||||
VENV=$( realpath venv )
|
||||
echo "Use existing virtualenv $VENV to install build dependencies"
|
||||
TEMP_VENV=0
|
||||
else
|
||||
VENV=$(mktemp -d)
|
||||
echo "Create a temporary virtualenv in $VENV to install build dependencies..."
|
||||
TEMP_VENV=1
|
||||
python3 -m venv $VENV
|
||||
fi
|
||||
|
||||
echo "Install dependencies in virtualenv using pip..."
|
||||
$VENV/bin/python3 -m pip install stdeb wheel $QUIET_ARG
|
||||
|
||||
echo "Build wheel package..."
|
||||
$VENV/bin/python3 setup.py bdist_wheel
|
||||
|
||||
echo "Check gitdch is installed..."
|
||||
GITDCH=$(which gitdch)
|
||||
set -e
|
||||
if [ -z "$GITDCH" ]
|
||||
then
|
||||
TMP_GITDCH=$(mktemp -d)
|
||||
echo "Temporary install gitdch in $TMP_GITDCH..."
|
||||
git clone $QUIET_ARG https://gitea.zionetrix.net/bn8/gitdch.git $TMP_GITDCH/gitdch
|
||||
GITDCH="$VENV/bin/python3 $TMP_GITDCH/gitdch/gitdch"
|
||||
echo "Install gitdch dependencies in $VENV..."
|
||||
$VENV/bin/python3 -m pip install GitPython $QUIET_ARG
|
||||
else
|
||||
TMP_GITDCH=""
|
||||
echo "Use existing installation of gitdch ($GITDCH)"
|
||||
fi
|
||||
|
||||
echo "Build debian source package using stdeb sdist_dsc command..."
|
||||
$VENV/bin/python3 setup.py --command-packages=stdeb.command sdist_dsc \
|
||||
--package3 "python3-mylib" \
|
||||
--maintainer "Benjamin Renard <brenard@zionetrix.net>" \
|
||||
--compat 10 \
|
||||
--section net \
|
||||
--forced-upstream-version "$VERSION"
|
||||
|
||||
echo "Keep only debian package directory and orig.tar.gz archive..."
|
||||
find deb_dist/ -maxdepth 1 -type f ! -name '*.orig.tar.gz' -delete
|
||||
|
||||
echo "Enter in debian package directory..."
|
||||
cd deb_dist/mylib-$VERSION
|
||||
|
||||
if [ -z "$DEBIAN_CODENAME" ]
|
||||
then
|
||||
echo "Retrieve debian codename using lsb_release..."
|
||||
DEBIAN_CODENAME=$( lsb_release -c -s )
|
||||
[ $( lsb_release -r -s ) -ge 9 ] && DEBIAN_CODENAME="${DEBIAN_CODENAME}-ee"
|
||||
else
|
||||
echo "Use debian codename from environment ($DEBIAN_CODENAME)"
|
||||
fi
|
||||
|
||||
# Compute debian package version
|
||||
DEB_VERSION_SUFFIX="-1"
|
||||
DEB_VERSION="$VERSION$DEB_VERSION_SUFFIX"
|
||||
|
||||
echo "Generate debian changelog using gitdch..."
|
||||
GITDCH_ARGS=('--verbose')
|
||||
[ -n "$QUIET_ARG" ] && GITDCH_ARGS=('--warning')
|
||||
if [ -n "$MAINTAINER_NAME" ]
|
||||
then
|
||||
echo "Use maintainer name from environment ($MAINTAINER_NAME)"
|
||||
GITDCH_ARGS+=("--maintainer-name" "${MAINTAINER_NAME}")
|
||||
fi
|
||||
if [ -n "$MAINTAINER_EMAIL" ]
|
||||
then
|
||||
echo "Use maintainer email from environment ($MAINTAINER_EMAIL)"
|
||||
GITDCH_ARGS+=("--maintainer-email" "$MAINTAINER_EMAIL")
|
||||
fi
|
||||
$GITDCH \
|
||||
--package-name mylib \
|
||||
--version "${DEB_VERSION}" \
|
||||
--version-suffix "${DEB_VERSION_SUFFIX}" \
|
||||
--code-name $DEBIAN_CODENAME \
|
||||
--output debian/changelog \
|
||||
--release-notes ../../dist/release_notes.md \
|
||||
--path ../../ \
|
||||
--exclude "^CI: " \
|
||||
--exclude "^Docker: " \
|
||||
--exclude "^pre-commit: " \
|
||||
--exclude "\.?woodpecker(\.yml)?" \
|
||||
--exclude "build(\.sh)?" \
|
||||
--exclude "tests(\.sh)?" \
|
||||
--exclude "README(\.md)?" \
|
||||
--exclude "^Merge branch " \
|
||||
"${GITDCH_ARGS[@]}"
|
||||
|
||||
echo "Add custom package name for dependencies..."
|
||||
cat << EOF > debian/py3dist-overrides
|
||||
cx_oracle python3-cx-oracle
|
||||
EOF
|
||||
|
||||
[ $TEMP_VENV -eq 1 ] && echo "Clean temporary virtualenv..." && rm -fr $VENV
|
||||
|
||||
[ -n "$TMP_GITDCH" ] && echo "Clean temporary gitdch installation..." && rm -fr $TMP_GITDCH
|
||||
|
||||
echo "Build debian package..."
|
||||
dpkg-buildpackage
|
6
docker/dev-master/Dockerfile
Normal file
6
docker/dev-master/Dockerfile
Normal file
|
@ -0,0 +1,6 @@
|
|||
FROM brenard/mylib:latest
|
||||
RUN apt-get remove -y python3-mylib && \
|
||||
git clone https://gitea.zionetrix.net/bn8/python-mylib.git /src && \
|
||||
pip install --break-system-packages /src[dev] && \
|
||||
cd /src && \
|
||||
pre-commit run --all-files
|
26
docker/latest/Dockerfile
Normal file
26
docker/latest/Dockerfile
Normal file
|
@ -0,0 +1,26 @@
|
|||
FROM node:16-bookworm-slim
|
||||
RUN echo "deb http://debian.zionetrix.net stable main" > /etc/apt/sources.list.d/zionetrix.list && \
|
||||
apt-get \
|
||||
-o Acquire::AllowInsecureRepositories=true \
|
||||
-o Acquire::AllowDowngradeToInsecureRepositories=true \
|
||||
update && \
|
||||
apt-get \
|
||||
-o APT::Get::AllowUnauthenticated=true \
|
||||
install --yes zionetrix-archive-keyring && \
|
||||
apt-get update && \
|
||||
apt-get upgrade -y && \
|
||||
apt-get install -y \
|
||||
python3-all python3-dev python3-pip python3-venv python3-mylib build-essential git \
|
||||
libldap2-dev libsasl2-dev \
|
||||
pkg-config libsystemd-dev \
|
||||
libpq-dev libmariadb-dev \
|
||||
wget unzip && \
|
||||
apt-get clean && \
|
||||
rm -fr rm -rf /var/lib/apt/lists/*
|
||||
RUN python3 -m pip install --break-system-packages pylint pytest flake8 flake8-junit-report pylint-junit junitparser pre-commit
|
||||
RUN wget --no-verbose \
|
||||
-O /opt/instantclient-basic-linux.x64-21.4.0.0.0dbru.zip \
|
||||
https://download.oracle.com/otn_software/linux/instantclient/214000/instantclient-basic-linux.x64-21.4.0.0.0dbru.zip && \
|
||||
unzip -qq -d /opt /opt/instantclient-basic-linux.x64-21.4.0.0.0dbru.zip && \
|
||||
echo /opt/instantclient_* > /etc/ld.so.conf.d/oracle-instantclient.conf && \
|
||||
ldconfig
|
87
mylib/__init__.py
Normal file
87
mylib/__init__.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
""" Some really common helper functions """
|
||||
|
||||
#
|
||||
# Pretty formatting helpers
|
||||
#
|
||||
|
||||
|
||||
def increment_prefix(prefix):
|
||||
"""Increment the given prefix with two spaces"""
|
||||
return f'{prefix if prefix else " "} '
|
||||
|
||||
|
||||
def pretty_format_value(value, encoding="utf8", prefix=None):
|
||||
"""Returned pretty formatted value to display"""
|
||||
if isinstance(value, dict):
|
||||
return pretty_format_dict(value, encoding=encoding, prefix=prefix)
|
||||
if isinstance(value, list):
|
||||
return pretty_format_list(value, encoding=encoding, prefix=prefix)
|
||||
if isinstance(value, bytes):
|
||||
return f"'{value.decode(encoding, errors='replace')}'"
|
||||
if isinstance(value, str):
|
||||
return f"'{value}'"
|
||||
if value is None:
|
||||
return "None"
|
||||
return f"{value} ({type(value)})"
|
||||
|
||||
|
||||
def pretty_format_value_in_list(value, encoding="utf8", prefix=None):
|
||||
"""
|
||||
Returned pretty formatted value to display in list
|
||||
|
||||
That method will prefix value with line return and incremented prefix
|
||||
if pretty formatted value contains line return.
|
||||
"""
|
||||
prefix = prefix if prefix else ""
|
||||
value = pretty_format_value(value, encoding, prefix)
|
||||
if "\n" in value:
|
||||
inc_prefix = increment_prefix(prefix)
|
||||
value = "\n" + "\n".join([inc_prefix + line for line in value.split("\n")])
|
||||
return value
|
||||
|
||||
|
||||
def pretty_format_dict(value, encoding="utf8", prefix=None):
|
||||
"""Returned pretty formatted dict to display"""
|
||||
prefix = prefix if prefix else ""
|
||||
result = []
|
||||
for key in sorted(value.keys()):
|
||||
result.append(
|
||||
f"{prefix}- {key} : "
|
||||
+ pretty_format_value_in_list(value[key], encoding=encoding, prefix=prefix)
|
||||
)
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def pretty_format_list(row, encoding="utf8", prefix=None):
|
||||
"""Returned pretty formatted list to display"""
|
||||
prefix = prefix if prefix else ""
|
||||
result = []
|
||||
for idx, values in enumerate(row):
|
||||
result.append(
|
||||
f"{prefix}- #{idx} : "
|
||||
+ pretty_format_value_in_list(values, encoding=encoding, prefix=prefix)
|
||||
)
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def pretty_format_timedelta(timedelta):
|
||||
"""Format timedelta object"""
|
||||
seconds = int(timedelta.total_seconds())
|
||||
if seconds < 1:
|
||||
return "less than one second"
|
||||
periods = [
|
||||
("year", 60 * 60 * 24 * 365),
|
||||
("month", 60 * 60 * 24 * 30),
|
||||
("day", 60 * 60 * 24),
|
||||
("hour", 60 * 60),
|
||||
("minute", 60),
|
||||
("second", 1),
|
||||
]
|
||||
|
||||
strings = []
|
||||
for period_name, period_seconds in periods:
|
||||
if seconds >= period_seconds:
|
||||
period_value, seconds = divmod(seconds, period_seconds)
|
||||
strings.append(f'{period_value} {period_name}{"s" if period_value > 1 else ""}')
|
||||
|
||||
return ", ".join(strings)
|
1445
mylib/config.py
Normal file
1445
mylib/config.py
Normal file
File diff suppressed because it is too large
Load diff
428
mylib/db.py
Normal file
428
mylib/db.py
Normal file
|
@ -0,0 +1,428 @@
|
|||
""" Basic SQL DB client """
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
#
|
||||
# Exceptions
|
||||
#
|
||||
|
||||
|
||||
class DBException(Exception):
|
||||
"""That is the base exception class for all the other exceptions provided by this module."""
|
||||
|
||||
def __init__(self, error, *args, **kwargs):
|
||||
for arg, value in kwargs.items():
|
||||
setattr(self, arg, value)
|
||||
super().__init__(error.format(*args, **kwargs))
|
||||
|
||||
|
||||
class DBNotImplemented(DBException, RuntimeError):
|
||||
"""
|
||||
Raised when calling a method not implemented in child class
|
||||
"""
|
||||
|
||||
def __init__(self, method, class_name):
|
||||
super().__init__(
|
||||
"The method {method} is not yet implemented in class {class_name}",
|
||||
method=method,
|
||||
class_name=class_name,
|
||||
)
|
||||
|
||||
|
||||
class DBFailToConnect(DBException, RuntimeError):
|
||||
"""
|
||||
Raised on connecting error occurred
|
||||
"""
|
||||
|
||||
def __init__(self, uri):
|
||||
super().__init__("An error occurred during database connection ({uri})", uri=uri)
|
||||
|
||||
|
||||
class DBDuplicatedSQLParameter(DBException, KeyError):
|
||||
"""
|
||||
Raised when trying to set a SQL query parameter
|
||||
and an other parameter with the same name is already set
|
||||
"""
|
||||
|
||||
def __init__(self, parameter_name):
|
||||
super().__init__(
|
||||
"Duplicated SQL parameter '{parameter_name}'", parameter_name=parameter_name
|
||||
)
|
||||
|
||||
|
||||
class DBUnsupportedWHEREClauses(DBException, TypeError):
|
||||
"""
|
||||
Raised when trying to execute query with unsupported
|
||||
WHERE clauses provided
|
||||
"""
|
||||
|
||||
def __init__(self, where_clauses):
|
||||
super().__init__("Unsupported WHERE clauses: {where_clauses}", where_clauses=where_clauses)
|
||||
|
||||
|
||||
class DBInvalidOrderByClause(DBException, TypeError):
|
||||
"""
|
||||
Raised when trying to select on table with invalid
|
||||
ORDER BY clause provided
|
||||
"""
|
||||
|
||||
def __init__(self, order_by):
|
||||
super().__init__(
|
||||
"Invalid ORDER BY clause: {order_by}. Must be a string or a list of two values"
|
||||
" (ordering field name and direction)",
|
||||
order_by=order_by,
|
||||
)
|
||||
|
||||
|
||||
class DBInvalidLimitClause(DBException, TypeError):
|
||||
"""
|
||||
Raised when trying to select on table with invalid
|
||||
LIMIT clause provided
|
||||
"""
|
||||
|
||||
def __init__(self, limit):
|
||||
super().__init__(
|
||||
"Invalid LIMIT clause: {limit}. Must be a non-zero positive integer.",
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
|
||||
class DB:
|
||||
"""Database client"""
|
||||
|
||||
just_try = False
|
||||
|
||||
def __init__(self, just_try=False, **kwargs):
|
||||
self.just_try = just_try
|
||||
self._conn = None
|
||||
for arg, value in kwargs.items():
|
||||
setattr(self, f"_{arg}", value)
|
||||
|
||||
def connect(self, exit_on_error=True):
|
||||
"""Connect to DB server"""
|
||||
raise DBNotImplemented("connect", self.__class__.__name__)
|
||||
|
||||
def close(self):
|
||||
"""Close connection with DB server (if opened)"""
|
||||
if self._conn:
|
||||
self._conn.close()
|
||||
self._conn = None
|
||||
|
||||
@staticmethod
|
||||
def _log_query(sql, params):
|
||||
log.debug(
|
||||
'Run SQL query "%s" %s',
|
||||
sql,
|
||||
"with params = {}".format( # pylint: disable=consider-using-f-string
|
||||
", ".join([f"{key} = {value}" for key, value in params.items()])
|
||||
if params
|
||||
else "without params"
|
||||
),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _log_query_exception(sql, params):
|
||||
log.exception(
|
||||
'Error during SQL query "%s" %s',
|
||||
sql,
|
||||
"with params = {}".format( # pylint: disable=consider-using-f-string
|
||||
", ".join([f"{key} = {value}" for key, value in params.items()])
|
||||
if params
|
||||
else "without params"
|
||||
),
|
||||
)
|
||||
|
||||
def doSQL(self, sql, params=None):
|
||||
"""
|
||||
Run SQL query and commit changes (rollback on error)
|
||||
|
||||
:param sql: The SQL query
|
||||
:param params: The SQL query's parameters as dict (optional)
|
||||
|
||||
:return: True on success, False otherwise
|
||||
:rtype: bool
|
||||
"""
|
||||
raise DBNotImplemented("doSQL", self.__class__.__name__)
|
||||
|
||||
def doSelect(self, sql, params=None):
|
||||
"""
|
||||
Run SELECT SQL query and return list of selected rows as dict
|
||||
|
||||
:param sql: The SQL query
|
||||
:param params: The SQL query's parameters as dict (optional)
|
||||
|
||||
:return: List of selected rows as dict on success, False otherwise
|
||||
:rtype: list, bool
|
||||
"""
|
||||
raise DBNotImplemented("doSelect", self.__class__.__name__)
|
||||
|
||||
#
|
||||
# SQL helpers
|
||||
#
|
||||
|
||||
@staticmethod
|
||||
def _quote_table_name(table):
|
||||
"""Quote table name"""
|
||||
return '"{}"'.format( # pylint: disable=consider-using-f-string
|
||||
'"."'.join(table.split("."))
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _quote_field_name(field):
|
||||
"""Quote table name"""
|
||||
return f'"{field}"'
|
||||
|
||||
@staticmethod
|
||||
def format_param(param):
|
||||
"""Format SQL query parameter for prepared query"""
|
||||
return f"%({param})s"
|
||||
|
||||
@classmethod
|
||||
def _combine_params(cls, params, to_add=None, **kwargs):
|
||||
if to_add:
|
||||
assert isinstance(to_add, dict), "to_add must be a dict or None"
|
||||
params = cls._combine_params(params, **to_add)
|
||||
|
||||
for param, value in kwargs.items():
|
||||
if param in params:
|
||||
raise DBDuplicatedSQLParameter(param)
|
||||
params[param] = value
|
||||
return params
|
||||
|
||||
@staticmethod
|
||||
def _get_unique_param_name(field, params):
|
||||
"""Return a unique parameter name based on specified field name"""
|
||||
param = field
|
||||
if field in params:
|
||||
idx = 1
|
||||
while param in params:
|
||||
param = f"{field}_{idx}"
|
||||
idx += 1
|
||||
return param
|
||||
|
||||
@classmethod
|
||||
def _format_where_clauses(cls, where_clauses, params=None, where_op=None):
|
||||
"""
|
||||
Format WHERE clauses
|
||||
|
||||
:param where_clauses: The WHERE clauses. Could be:
|
||||
- a raw SQL WHERE clause as string
|
||||
- a tuple of two elements: a raw WHERE clause and its parameters as dict
|
||||
- a dict of WHERE clauses with field name as key and WHERE clause value as value
|
||||
- a list of any of previous valid WHERE clauses
|
||||
:param params: Dict of other already set SQL query parameters (optional)
|
||||
:param where_op: SQL operator used to combine WHERE clauses together (optional, default:
|
||||
AND)
|
||||
|
||||
:return: A tuple of two elements: raw SQL WHERE combined clauses and parameters on success
|
||||
:rtype: string, bool
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
if where_op is None:
|
||||
where_op = "AND"
|
||||
|
||||
if isinstance(where_clauses, str):
|
||||
return (where_clauses, params)
|
||||
|
||||
if (
|
||||
isinstance(where_clauses, tuple)
|
||||
and len(where_clauses) == 2
|
||||
and isinstance(where_clauses[1], dict)
|
||||
):
|
||||
cls._combine_params(params, where_clauses[1])
|
||||
return (where_clauses[0], params)
|
||||
|
||||
if isinstance(where_clauses, (list, tuple)):
|
||||
sql_where_clauses = []
|
||||
for where_clause in where_clauses:
|
||||
sql2, params = cls._format_where_clauses(
|
||||
where_clause, params=params, where_op=where_op
|
||||
)
|
||||
sql_where_clauses.append(sql2)
|
||||
return (f" {where_op} ".join(sql_where_clauses), params)
|
||||
|
||||
if isinstance(where_clauses, dict):
|
||||
sql_where_clauses = []
|
||||
for field, value in where_clauses.items():
|
||||
if isinstance(value, list):
|
||||
param_names = []
|
||||
for idx, v in enumerate(value):
|
||||
param = cls._get_unique_param_name(f"{field}_{idx}", params)
|
||||
cls._combine_params(params, **{param: v})
|
||||
param_names.append(param)
|
||||
sql_where_clauses.append(
|
||||
f"{cls._quote_field_name(field)} IN "
|
||||
f"({', '.join([cls.format_param(param) for param in param_names])})"
|
||||
)
|
||||
else:
|
||||
param = cls._get_unique_param_name(field, params)
|
||||
cls._combine_params(params, **{param: value})
|
||||
sql_where_clauses.append(
|
||||
f"{cls._quote_field_name(field)} = {cls.format_param(param)}"
|
||||
)
|
||||
return (f" {where_op} ".join(sql_where_clauses), params)
|
||||
raise DBUnsupportedWHEREClauses(where_clauses)
|
||||
|
||||
@classmethod
|
||||
def _add_where_clauses(cls, sql, params, where_clauses, where_op=None):
|
||||
"""
|
||||
Add WHERE clauses to an SQL query
|
||||
|
||||
:param sql: The SQL query to complete
|
||||
:param params: The dict of parameters of the SQL query to complete
|
||||
:param where_clauses: The WHERE clause (see _format_where_clauses())
|
||||
:param where_op: SQL operator used to combine WHERE clauses together (optional, default:
|
||||
see _format_where_clauses())
|
||||
|
||||
:return:
|
||||
:rtype: A tuple of two elements: raw SQL WHERE combined clauses and parameters
|
||||
"""
|
||||
if where_clauses:
|
||||
sql_where, params = cls._format_where_clauses(
|
||||
where_clauses, params=params, where_op=where_op
|
||||
)
|
||||
sql += " WHERE " + sql_where
|
||||
return (sql, params)
|
||||
|
||||
def insert(self, table, values, just_try=False):
|
||||
"""Run INSERT SQL query"""
|
||||
# pylint: disable=consider-using-f-string
|
||||
sql = "INSERT INTO {} ({}) VALUES ({})".format( # nosec
|
||||
self._quote_table_name(table),
|
||||
", ".join([self._quote_field_name(field) for field in values.keys()]),
|
||||
", ".join([self.format_param(key) for key in values]),
|
||||
)
|
||||
|
||||
if just_try:
|
||||
log.debug("Just-try mode: execute INSERT query: %s", sql)
|
||||
return True
|
||||
|
||||
log.debug(sql)
|
||||
if not self.doSQL(sql, params=values):
|
||||
log.error("Fail to execute INSERT query (SQL: %s)", sql)
|
||||
return False
|
||||
return True
|
||||
|
||||
def update(self, table, values, where_clauses, where_op=None, just_try=False):
|
||||
"""Run UPDATE SQL query"""
|
||||
# pylint: disable=consider-using-f-string
|
||||
sql = "UPDATE {} SET {}".format( # nosec
|
||||
self._quote_table_name(table),
|
||||
", ".join(
|
||||
[f"{self._quote_field_name(key)} = {self.format_param(key)}" for key in values]
|
||||
),
|
||||
)
|
||||
params = values
|
||||
|
||||
try:
|
||||
sql, params = self._add_where_clauses(sql, params, where_clauses, where_op=where_op)
|
||||
except (DBDuplicatedSQLParameter, DBUnsupportedWHEREClauses):
|
||||
log.error("Fail to add WHERE clauses", exc_info=True)
|
||||
return False
|
||||
|
||||
if just_try:
|
||||
log.debug("Just-try mode: execute UPDATE query: %s", sql)
|
||||
return True
|
||||
|
||||
log.debug(sql)
|
||||
if not self.doSQL(sql, params=params):
|
||||
log.error("Fail to execute UPDATE query (SQL: %s)", sql)
|
||||
return False
|
||||
return True
|
||||
|
||||
def delete(self, table, where_clauses, where_op="AND", just_try=False):
|
||||
"""Run DELETE SQL query"""
|
||||
sql = f"DELETE FROM {self._quote_table_name(table)}" # nosec
|
||||
params = {}
|
||||
|
||||
try:
|
||||
sql, params = self._add_where_clauses(sql, params, where_clauses, where_op=where_op)
|
||||
except (DBDuplicatedSQLParameter, DBUnsupportedWHEREClauses):
|
||||
log.error("Fail to add WHERE clauses", exc_info=True)
|
||||
return False
|
||||
|
||||
if just_try:
|
||||
log.debug("Just-try mode: execute UPDATE query: %s", sql)
|
||||
return True
|
||||
|
||||
log.debug(sql)
|
||||
if not self.doSQL(sql, params=params):
|
||||
log.error("Fail to execute UPDATE query (SQL: %s)", sql)
|
||||
return False
|
||||
return True
|
||||
|
||||
def truncate(self, table, just_try=False):
|
||||
"""Run TRUNCATE SQL query"""
|
||||
sql = f"TRUNCATE TABLE {self._quote_table_name(table)}" # nosec
|
||||
|
||||
if just_try:
|
||||
log.debug("Just-try mode: execute TRUNCATE query: %s", sql)
|
||||
return True
|
||||
|
||||
log.debug(sql)
|
||||
if not self.doSQL(sql):
|
||||
log.error("Fail to execute TRUNCATE query (SQL: %s)", sql)
|
||||
return False
|
||||
return True
|
||||
|
||||
def select(
|
||||
self,
|
||||
table,
|
||||
where_clauses=None,
|
||||
fields=None,
|
||||
where_op="AND",
|
||||
order_by=None,
|
||||
limit=None,
|
||||
just_try=False,
|
||||
):
|
||||
"""Run SELECT SQL query"""
|
||||
sql = "SELECT "
|
||||
if fields is None:
|
||||
sql += "*"
|
||||
elif isinstance(fields, str):
|
||||
sql += f"{self._quote_field_name(fields)}"
|
||||
else:
|
||||
sql += ", ".join([self._quote_field_name(field) for field in fields])
|
||||
|
||||
sql += f" FROM {self._quote_table_name(table)}"
|
||||
params = {}
|
||||
|
||||
try:
|
||||
sql, params = self._add_where_clauses(sql, params, where_clauses, where_op=where_op)
|
||||
except (DBDuplicatedSQLParameter, DBUnsupportedWHEREClauses):
|
||||
log.error("Fail to add WHERE clauses", exc_info=True)
|
||||
return False
|
||||
|
||||
if order_by:
|
||||
if isinstance(order_by, str):
|
||||
sql += f" ORDER BY {order_by}"
|
||||
elif (
|
||||
isinstance(order_by, (list, tuple))
|
||||
and len(order_by) == 2
|
||||
and isinstance(order_by[0], str)
|
||||
and isinstance(order_by[1], str)
|
||||
and order_by[1].upper() in ("ASC", "UPPER")
|
||||
):
|
||||
sql += f' ORDER BY "{order_by[0]}" {order_by[1].upper()}'
|
||||
else:
|
||||
raise DBInvalidOrderByClause(order_by)
|
||||
|
||||
if limit:
|
||||
if not isinstance(limit, int):
|
||||
try:
|
||||
limit = int(limit)
|
||||
except ValueError as err:
|
||||
raise DBInvalidLimitClause(limit) from err
|
||||
if limit <= 0:
|
||||
raise DBInvalidLimitClause(limit)
|
||||
sql += f" LIMIT {limit}"
|
||||
|
||||
if just_try:
|
||||
log.debug("Just-try mode: execute SELECT query : %s", sql)
|
||||
return just_try
|
||||
|
||||
return self.doSelect(sql, params=params)
|
435
mylib/email.py
Normal file
435
mylib/email.py
Normal file
|
@ -0,0 +1,435 @@
|
|||
""" Email client to forge and send emails """
|
||||
|
||||
import base64
|
||||
import email.utils
|
||||
import logging
|
||||
import os
|
||||
import smtplib
|
||||
from email.encoders import encode_base64
|
||||
from email.mime.base import MIMEBase
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
import magic
|
||||
from mako.template import Template as MakoTemplate
|
||||
|
||||
from mylib.config import (
|
||||
BooleanOption,
|
||||
ConfigurableObject,
|
||||
IntegerOption,
|
||||
PasswordOption,
|
||||
StringOption,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def load_image_as_base64(path):
|
||||
"""Load image file as base64"""
|
||||
log.debug("Load image file '%s'", path)
|
||||
with open(path, "rb") as file_desc:
|
||||
data = file_desc.read()
|
||||
return f"data:{magic.from_buffer(data, mime=True)};base64, {base64.b64encode(data).decode()}"
|
||||
|
||||
|
||||
class EmailClient(
    ConfigurableObject
):  # pylint: disable=useless-object-inheritance,too-many-instance-attributes
    """
    Email client

    This class abstract all interactions with the SMTP server: configuration,
    template loading, message forging and sending.
    """

    _config_name = "email"
    _config_comment = "Email"
    # Default value of every configuration option handled by this client
    _defaults = {
        "smtp_host": "localhost",
        "smtp_port": 25,
        "smtp_ssl": False,
        "smtp_tls": False,
        "smtp_user": None,
        "smtp_password": None,
        "smtp_debug": False,
        "sender_name": "No reply",
        "sender_email": "noreply@localhost",
        "encoding": "utf-8",
        "catch_all_addr": None,
        "just_try": False,
        "templates_path": None,
    }

    # Registry of named email templates; always re-assigned per instance in __init__()
    templates = {}

    def __init__(self, templates=None, initialize=False, **kwargs):
        """
        :param templates: Optional dict of preloaded templates, indexed by template name
        :param initialize: If True, run the configuration-initialized hook right away
        """
        super().__init__(**kwargs)

        assert templates is None or isinstance(templates, dict)
        self.templates = templates if templates else {}
        if initialize:
            self.initialize()

    # pylint: disable=arguments-differ,arguments-renamed
    def configure(self, use_smtp=True, **kwargs):
        """Configure options on registered mylib.Config object

        :param use_smtp: If True, also register the SMTP connection options
        """
        section = super().configure(
            just_try_help=kwargs.pop("just_try_help", "Just-try mode: do not really send emails"),
            **kwargs,
        )

        if use_smtp:
            section.add_option(
                StringOption,
                "smtp_host",
                default=self._defaults["smtp_host"],
                comment="SMTP server hostname/IP address",
            )
            section.add_option(
                IntegerOption,
                "smtp_port",
                default=self._defaults["smtp_port"],
                comment="SMTP server port",
            )
            section.add_option(
                BooleanOption,
                "smtp_ssl",
                default=self._defaults["smtp_ssl"],
                comment="Use SSL on SMTP server connection",
            )
            section.add_option(
                BooleanOption,
                "smtp_tls",
                default=self._defaults["smtp_tls"],
                comment="Use TLS on SMTP server connection",
            )
            section.add_option(
                StringOption,
                "smtp_user",
                default=self._defaults["smtp_user"],
                comment="SMTP authentication username",
            )
            section.add_option(
                PasswordOption,
                "smtp_password",
                default=self._defaults["smtp_password"],
                comment='SMTP authentication password (set to "keyring" to use XDG keyring)',
                username_option="smtp_user",
                keyring_value="keyring",
            )
            section.add_option(
                BooleanOption,
                "smtp_debug",
                default=self._defaults["smtp_debug"],
                comment="Enable SMTP debugging",
            )

        section.add_option(
            StringOption,
            "sender_name",
            default=self._defaults["sender_name"],
            comment="Sender name",
        )
        section.add_option(
            StringOption,
            "sender_email",
            default=self._defaults["sender_email"],
            comment="Sender email address",
        )
        section.add_option(
            StringOption, "encoding", default=self._defaults["encoding"], comment="Email encoding"
        )
        section.add_option(
            StringOption,
            "catch_all_addr",
            default=self._defaults["catch_all_addr"],
            comment="Catch all sent emails to this specified email address",
        )

        section.add_option(
            StringOption,
            "templates_path",
            default=self._defaults["templates_path"],
            comment="Path to templates directory",
        )

        return section

    def initialize(self, *args, **kwargs):  # pylint: disable=arguments-differ
        """Configuration initialized hook"""
        super().initialize(*args, **kwargs)
        self.load_templates_directory()

    def load_templates_directory(self, templates_path=None):
        """Load templates from specified directory

        :param templates_path: Directory to load templates from (default: the
            configured "templates_path" option)
        """
        if templates_path is None:
            templates_path = self._get_option("templates_path")
        if not templates_path:
            return
        log.debug("Load email templates from %s directory", templates_path)
        for filename in os.listdir(templates_path):
            filepath = os.path.join(templates_path, filename)
            if not os.path.isfile(filepath):
                continue
            # Template name comes from the file basename, its part from the extension
            template_name, template_type = os.path.splitext(filename)
            if template_type not in [".html", ".txt", ".subject"]:
                continue
            template_type = "text" if template_type == ".txt" else template_type[1:]
            if template_name not in self.templates:
                self.templates[template_name] = {"path": templates_path}
            log.debug("Load email template %s %s from %s", template_name, template_type, filepath)
            with open(filepath, encoding="utf8") as file_desc:
                self.templates[template_name][template_type] = MakoTemplate(
                    file_desc.read()
                )  # nosec

    def forge_message(
        self,
        recipients,
        subject=None,
        html_body=None,
        text_body=None,  # pylint: disable=too-many-arguments,too-many-locals
        attachment_files=None,
        attachment_payloads=None,
        sender_name=None,
        sender_email=None,
        encoding=None,
        template=None,
        cc=None,
        **template_vars,
    ):
        """
        Forge a message

        :param recipients: The recipient(s) of the email. List of tuple(name, email) or
                           just the email of the recipients.
        :param subject: The subject of the email.
        :param html_body: The HTML body of the email
        :param text_body: The plain text body of the email
        :param attachment_files: List of filepaths to attach
        :param attachment_payloads: List of tuples with filename and payload to attach
        :param sender_name: Custom sender name (default: as defined on initialization)
        :param sender_email: Custom sender email (default: as defined on initialization)
        :param encoding: Email content encoding (default: as defined on initialization)
        :param template: The name of a template to use to forge this email
        :param cc: Optional list of CC recipient addresses.
                   List of tuple(name, email) or just the email of the recipients.

        All other parameters will be consider as template variables.
        """
        recipients = [recipients] if not isinstance(recipients, list) else recipients
        msg = MIMEMultipart("alternative")
        msg["To"] = ", ".join(
            [
                email.utils.formataddr(recipient) if isinstance(recipient, tuple) else recipient
                for recipient in recipients
            ]
        )

        if cc:
            cc = [cc] if not isinstance(cc, list) else cc
            msg["Cc"] = ", ".join(
                [
                    email.utils.formataddr(recipient) if isinstance(recipient, tuple) else recipient
                    for recipient in cc
                ]
            )

        msg["From"] = email.utils.formataddr(
            (
                sender_name or self._get_option("sender_name"),
                sender_email or self._get_option("sender_email"),
            )
        )
        if subject:
            # The subject may itself be a Mako template or a str.format() pattern
            msg["Subject"] = (
                subject.render(**template_vars)
                if isinstance(subject, MakoTemplate)
                else subject.format(**template_vars)
            )
        msg["Date"] = email.utils.formatdate(None, True)
        encoding = encoding if encoding else self._get_option("encoding")
        if template:
            log.debug("Forge email from template %s", template)
            assert template in self.templates, f"Unknown template {template}"
            # Handle subject from template
            if not subject:
                assert self.templates[template].get(
                    "subject"
                ), f"No subject defined in template {template}"
                msg["Subject"] = (
                    self.templates[template]["subject"].render(**template_vars)
                    if isinstance(self.templates[template]["subject"], MakoTemplate)
                    else self.templates[template]["subject"].format(**template_vars)
                )

            # Put HTML part in last one to preferred it
            parts = []
            if self.templates[template].get("text"):
                if isinstance(self.templates[template]["text"], MakoTemplate):
                    parts.append(
                        (self.templates[template]["text"].render(**template_vars), "plain")
                    )
                else:
                    parts.append(
                        (self.templates[template]["text"].format(**template_vars), "plain")
                    )
            if self.templates[template].get("html"):
                if isinstance(self.templates[template]["html"], MakoTemplate):
                    # Expose an image loader bound to the template directory so HTML
                    # templates can inline images as base64 data-URIs
                    template_vars["load_image_as_base64"] = self.template_image_loader(
                        self.templates[template].get("path")
                    )
                    parts.append((self.templates[template]["html"].render(**template_vars), "html"))
                else:
                    parts.append((self.templates[template]["html"].format(**template_vars), "html"))

            for body, mime_type in parts:
                msg.attach(MIMEText(body.encode(encoding), mime_type, _charset=encoding))
        else:
            assert subject, "No subject provided"
            if text_body:
                msg.attach(MIMEText(text_body.encode(encoding), "plain", _charset=encoding))
            if html_body:
                msg.attach(MIMEText(html_body.encode(encoding), "html", _charset=encoding))
        if attachment_files:
            for filepath in attachment_files:
                with open(filepath, "rb") as fp:
                    part = MIMEBase("application", "octet-stream")
                    part.set_payload(fp.read())
                    encode_base64(part)
                    part.add_header(
                        "Content-Disposition",
                        f'attachment; filename="{os.path.basename(filepath)}"',
                    )
                    msg.attach(part)
        if attachment_payloads:
            for filename, payload in attachment_payloads:
                part = MIMEBase("application", "octet-stream")
                part.set_payload(payload)
                encode_base64(part)
                # Fix: the f-string previously contained no placeholder, so the
                # real filename of payload attachments was silently dropped
                part.add_header("Content-Disposition", f'attachment; filename="{filename}"')
                msg.attach(part)
        return msg

    @staticmethod
    def template_image_loader(directory_path):
        """Return wrapper for the load_image_as_base64 function bind on the template directory"""

        def _load_image_as_base64(path):
            # Relative paths are resolved against the template directory
            return load_image_as_base64(
                os.path.join(directory_path, path)
                if directory_path and not os.path.isabs(path)
                else path
            )

        return _load_image_as_base64

    @staticmethod
    def _addresses_to_str(addresses):
        """Format address(es) for logging purpose

        :param addresses: None, a single address or a list of addresses; each
            address is either an email string or a (name, email) tuple.
        """
        if addresses is None:
            return None
        if not isinstance(addresses, list):
            addresses = [addresses]
        return ", ".join(
            address[1] if isinstance(address, tuple) else address for address in addresses
        )

    def send(
        self, recipients, msg=None, subject=None, just_try=None, cc=None, bcc=None, **forge_args
    ):
        """
        Send an email

        :param recipients: The recipient(s) of the email. List of tuple(name, email) or
                           just the email of the recipients.
        :param msg: The message of this email (as MIMEBase or derivated classes)
        :param subject: The subject of the email (only if the message is not provided
                        using msg parameter)
        :param just_try: Enable just try mode (do not really send email, default: as defined on
                         initialization)
        :param cc: Optional list of CC recipient addresses. List of tuple(name, email) or
                   just the email of the recipients.
        :param bcc: Optional list of BCC recipient addresses. List of tuple(name, email) or
                    just the email of the recipients.

        All other parameters will be consider as parameters to forge the message
        (only if the message is not provided using msg parameter).

        :return: True on success (or in just-try mode), False otherwise
        """
        recipients = [recipients] if not isinstance(recipients, list) else recipients
        msg = msg if msg else self.forge_message(recipients, subject, cc=cc, **forge_args)
        catch_addr = self._get_option("catch_all_addr")
        if catch_addr:
            # Redirect everything to the catch-all address (testing/staging use)
            # Fix: use _addresses_to_str() instead of ", ".join(...) which raised
            # TypeError when recipients contained (name, email) tuples
            log.debug(
                "Catch email originally send to %s (CC:%s, BCC:%s) to %s",
                self._addresses_to_str(recipients),
                self._addresses_to_str(cc),
                self._addresses_to_str(bcc),
                catch_addr,
            )
            recipients = catch_addr if isinstance(catch_addr, list) else [catch_addr]
        else:
            # SMTP envelope recipients must include CC and BCC addresses
            if cc:
                recipients.extend(
                    [
                        recipient[1] if isinstance(recipient, tuple) else recipient
                        for recipient in (cc if isinstance(cc, list) else [cc])
                    ]
                )
            if bcc:
                recipients.extend(
                    [
                        recipient[1] if isinstance(recipient, tuple) else recipient
                        for recipient in (bcc if isinstance(bcc, list) else [bcc])
                    ]
                )

        if just_try if just_try is not None else self._just_try:
            log.debug(
                'Just-try mode: do not really send this email to %s (subject="%s")',
                self._addresses_to_str(recipients),
                subject or msg.get("subject", "No subject"),
            )
            return True

        smtp_host = self._get_option("smtp_host")
        smtp_port = self._get_option("smtp_port")
        try:
            # Fix: use the module logger instead of the root logger (logging.info)
            if self._get_option("smtp_ssl"):
                log.info("Establish SSL connection to server %s:%s", smtp_host, smtp_port)
                server = smtplib.SMTP_SSL(smtp_host, smtp_port)
            else:
                log.info("Establish connection to server %s:%s", smtp_host, smtp_port)
                server = smtplib.SMTP(smtp_host, smtp_port)
                # STARTTLS only applies to plain connections (SSL is already encrypted)
                if self._get_option("smtp_tls"):
                    log.info("Start TLS on SMTP connection")
                    server.starttls()
        except smtplib.SMTPException:
            log.error("Error connecting to SMTP server %s:%s", smtp_host, smtp_port, exc_info=True)
            return False

        if self._get_option("smtp_debug"):
            server.set_debuglevel(True)

        smtp_user = self._get_option("smtp_user")
        smtp_password = self._get_option("smtp_password")
        if smtp_user and smtp_password:
            try:
                log.info("Try to authenticate on SMTP connection as %s", smtp_user)
                server.login(smtp_user, smtp_password)
            except smtplib.SMTPException:
                log.error(
                    "Error authenticating on SMTP server %s:%s with user %s",
                    smtp_host,
                    smtp_port,
                    smtp_user,
                    exc_info=True,
                )
                return False

        error = False
        try:
            log.info("Sending email to %s", self._addresses_to_str(recipients))
            server.sendmail(
                self._get_option("sender_email"),
                [
                    recipient[1] if isinstance(recipient, tuple) else recipient
                    for recipient in recipients
                ],
                msg.as_string(),
            )
        except smtplib.SMTPException:
            error = True
            log.error(
                "Error sending email to %s", self._addresses_to_str(recipients), exc_info=True
            )
        finally:
            server.quit()

        return not error
|
1118
mylib/ldap.py
Normal file
1118
mylib/ldap.py
Normal file
File diff suppressed because it is too large
Load diff
138
mylib/mapping.py
Normal file
138
mylib/mapping.py
Normal file
|
@ -0,0 +1,138 @@
|
|||
"""
|
||||
My hash mapping library
|
||||
Mapping configuration
|
||||
{
|
||||
'[dst key 1]': { # Key name in the result
|
||||
|
||||
'order': [int], # Processing order between destinations keys
|
||||
|
||||
# Source values
|
||||
'other_key': [key], # Other key of the destination to use as source of values
|
||||
'key' : '[src key]', # Key of source hash to get source values
|
||||
'keys' : ['[sk1]', '[sk2]', ...], # List of source hash's keys to get source values
|
||||
|
||||
# Clean / convert values
|
||||
'cleanRegex': '[regex]', # Regex that be use to remove unwanted characters. Ex : [^0-9+]
|
||||
'convert': [function], # Function to use to convert value : Original value will be passed
|
||||
# as argument and the value retrieve will replace source value in
|
||||
# the result
|
||||
# Ex :
|
||||
# lambda x: x.strip()
|
||||
# lambda x: "myformat : %s" % x
|
||||
# Deduplicate / check values
|
||||
'deduplicate': [bool], # If True, sources values will be depluplicated
|
||||
'check': [function], # Function to use to check source value : Source value will be passed
|
||||
# as argument and if function return True, the value will be preserved
|
||||
# Ex :
|
||||
# lambda x: x in my_global_hash
|
||||
# Join values
|
||||
'join': '[glue]', # If present, sources values will be join using the "glue"
|
||||
|
||||
# Alternative mapping
|
||||
'or': { [map configuration] } # If this mapping case does not retrieve any value, try to
|
||||
# get value(s) with this other mapping configuration
|
||||
},
|
||||
'[dst key 2]': {
|
||||
[...]
|
||||
}
|
||||
}
|
||||
|
||||
Return format :
|
||||
{
|
||||
'[dst key 1]': ['v1','v2', ...],
|
||||
'[dst key 2]': [ ... ],
|
||||
[...]
|
||||
}
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def clean_value(value):
    """Normalize a source value: integers are turned into their string form."""
    return str(value) if isinstance(value, int) else value
|
||||
|
||||
|
||||
def get_values(dst, dst_key, src, m):
    """
    Extract and transform the source values for one destination key.

    :param dst: The destination hash built so far (read for 'other_key' sources)
    :param dst_key: The destination key being computed (used for logging)
    :param src: The source hash
    :param m: The mapping configuration of this destination key (see module docstring)

    :return: The list of computed values (possibly empty)
    """
    values = []
    if "other_key" in m:
        if m["other_key"] in dst:
            # Fix: copy the other key's values. The previous code aliased the
            # list, so a later append() silently mutated dst[m["other_key"]].
            values = list(dst[m["other_key"]])
    if "key" in m:
        if m["key"] in src and src[m["key"]] != "":
            values.append(clean_value(src[m["key"]]))

    if "keys" in m:
        for key in m["keys"]:
            if key in src and src[key] != "":
                values.append(clean_value(src[key]))

    # Clean values with the regex, dropping those that become empty
    if "cleanRegex" in m and values:
        values = [nv for nv in (re.sub(m["cleanRegex"], "", v) for v in values) if nv != ""]

    # Convert values, dropping those that convert to the empty string
    if "convert" in m and values:
        values = [nv for nv in (m["convert"](v) for v in values) if nv != ""]

    # Deduplicate values, preserving first-seen order (values may be
    # unhashable, so a plain membership test is used instead of a set)
    if m.get("deduplicate") and len(values) > 1:
        deduplicated = []
        for v in values:
            if v not in deduplicated:
                deduplicated.append(v)
        values = deduplicated

    # Check values, dropping (and logging) the invalid ones
    if "check" in m and values:
        new_values = []
        for v in values:
            if m["check"](v):
                new_values.append(v)
            else:
                log.debug("Invalid value %s for key %s", v, dst_key)
        values = new_values

    # Join values using the configured glue
    if "join" in m and len(values) > 1:
        values = [m["join"].join(values)]

    # Alternative mapping: only tried when nothing was retrieved
    if not values and "or" in m:
        values = get_values(dst, dst_key, src, m["or"])

    return values
|
||||
|
||||
|
||||
def map_hash(mapping, src, dst=None):
    """
    Map the *src* hash into a destination hash according to *mapping*.

    Destination keys are processed by increasing configured "order". Returns
    the destination dict, or False when a required key could not be filled.
    """
    # Note: any falsy dst (None or an empty dict) is replaced by a fresh dict
    dst = dst if dst else {}
    assert isinstance(dst, dict)
    for dst_key in sorted(mapping, key=lambda key: mapping[key]["order"]):
        key_config = mapping[dst_key]
        values = get_values(dst, dst_key, src, key_config)

        if not values:
            if key_config.get("required"):
                log.debug(
                    "Destination key %s could not be filled from source but is required", dst_key
                )
                return False
            continue

        dst[dst_key] = values
    return dst
|
112
mylib/mysql.py
Normal file
112
mylib/mysql.py
Normal file
|
@ -0,0 +1,112 @@
|
|||
""" MySQL client """
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import MySQLdb
|
||||
from MySQLdb._exceptions import Error
|
||||
|
||||
from mylib.db import DB, DBFailToConnect
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MyDB(DB):
    """MySQL client"""

    _host = None
    _user = None
    _pwd = None
    _db = None

    def __init__(self, host, user, pwd, db, charset=None, **kwargs):
        """
        :param host: MySQL server hostname/IP address
        :param user: Connection username
        :param pwd: Connection password
        :param db: Database name
        :param charset: Connection charset (default: "utf8")
        """
        self._host = host
        self._user = user
        self._pwd = pwd
        self._db = db
        self._charset = charset if charset else "utf8"
        super().__init__(**kwargs)

    def connect(self, exit_on_error=True):
        """Connect to MySQL server

        :param exit_on_error: If True, exit the process on connection failure;
            otherwise raise DBFailToConnect.
        """
        if self._conn is None:
            try:
                self._conn = MySQLdb.connect(
                    host=self._host,
                    user=self._user,
                    passwd=self._pwd,
                    db=self._db,
                    charset=self._charset,
                    use_unicode=True,
                )
            except Error as err:
                log.fatal(
                    "An error occurred during MySQL database connection (%s@%s:%s).",
                    self._user,
                    self._host,
                    self._db,
                    exc_info=True,  # was exc_info=1: use the boolean form
                )
                if exit_on_error:
                    sys.exit(1)
                else:
                    raise DBFailToConnect(f"{self._user}@{self._host}:{self._db}") from err
        return True

    def doSQL(self, sql, params=None):
        """
        Run SQL query and commit changes (rollback on error)

        :param sql: The SQL query
        :param params: The SQL query's parameters as dict (optional)

        :return: True on success, False otherwise
        :rtype: bool
        """
        if self.just_try:
            log.debug("Just-try mode : do not really execute SQL query '%s'", sql)
            return True
        cursor = self._conn.cursor()
        try:
            self._log_query(sql, params)
            cursor.execute(sql, params)
            self._conn.commit()
            return True
        except Error:
            self._log_query_exception(sql, params)
            self._conn.rollback()
            return False
        finally:
            # Fix: always release the cursor (it was previously leaked)
            cursor.close()

    def doSelect(self, sql, params=None):
        """
        Run SELECT SQL query and return list of selected rows as dict

        :param sql: The SQL query
        :param params: The SQL query's parameters as dict (optional)

        :return: List of selected rows as dict on success, False otherwise
        :rtype: list, bool
        """
        cursor = None
        try:
            self._log_query(sql, params)
            cursor = self._conn.cursor()
            cursor.execute(sql, params)
            # Build one dict per row, keyed on the column names from the
            # cursor description
            return [
                {field[0]: row[idx] for idx, field in enumerate(cursor.description)}
                for row in cursor.fetchall()
            ]
        except Error:
            self._log_query_exception(sql, params)
            return False
        finally:
            # Fix: always release the cursor (it was previously leaked)
            if cursor is not None:
                cursor.close()

    @staticmethod
    def _quote_table_name(table):
        """Quote table name (each dotted component is backquoted separately)"""
        return "`{}`".format(  # pylint: disable=consider-using-f-string
            "`.`".join(table.split("."))
        )

    @staticmethod
    def _quote_field_name(field):
        """Quote field name"""
        return f"`{field}`"
|
919
mylib/opening_hours.py
Normal file
919
mylib/opening_hours.py
Normal file
|
@ -0,0 +1,919 @@
|
|||
""" Opening hours helpers """
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
week_days = ["lundi", "mardi", "mercredi", "jeudi", "vendredi", "samedi", "dimanche"]
|
||||
date_format = "%d/%m/%Y"
|
||||
date_pattern = re.compile("^([0-9]{2})/([0-9]{2})/([0-9]{4})$")
|
||||
time_pattern = re.compile("^([0-9]{1,2})h([0-9]{2})?$")
|
||||
_nonworking_french_public_days_of_the_year_cache = {}
|
||||
|
||||
|
||||
def easter_date(year):
    """Compute the (western) Easter Sunday date of the specified year.

    Pure arithmetic computus; the intermediate values are purely algorithmic
    and have no direct calendar meaning.
    """
    cc = year // 100
    yy = year % 100
    t1 = (3 * (cc + 25)) // 4
    t2 = (3 * (cc + 25)) % 4
    t3 = (8 * (cc + 11)) // 25
    t4 = (5 * cc + yy) % 19
    t5 = (19 * t4 + t1 - t3) % 30
    t6 = (t4 + 11 * t5) // 319
    t7 = (60 * (5 - t2) + yy) // 4
    t8 = (60 * (5 - t2) + yy) % 4
    t9 = (2 * t7 - t8 - t5 + t6) % 7
    month = (t5 - t6 + t9 + 114) // 31
    day = (t5 - t6 + t9 + 114) % 31 + 1
    return datetime.date(year, month, day)
|
||||
|
||||
|
||||
def nonworking_french_public_days_of_the_year(year=None):
    """Return the dict of non-working french public days of *year* (default:
    current year), keyed by day name. Results are cached per year."""
    if year is None:
        year = datetime.date.today().year
    cached = _nonworking_french_public_days_of_the_year_cache.get(year)
    if cached is None:
        # Moveable feasts are all computed as offsets from Easter Sunday
        easter = easter_date(year)
        cached = {
            "1janvier": datetime.date(year, 1, 1),
            "paques": easter,
            "lundi_paques": easter + datetime.timedelta(1),
            "1mai": datetime.date(year, 5, 1),
            "8mai": datetime.date(year, 5, 8),
            "jeudi_ascension": easter + datetime.timedelta(39),
            "pentecote": easter + datetime.timedelta(49),
            "lundi_pentecote": easter + datetime.timedelta(50),
            "14juillet": datetime.date(year, 7, 14),
            "15aout": datetime.date(year, 8, 15),
            "1novembre": datetime.date(year, 11, 1),
            "11novembre": datetime.date(year, 11, 11),
            "noel": datetime.date(year, 12, 25),
            "saint_etienne": datetime.date(year, 12, 26),
        }
        _nonworking_french_public_days_of_the_year_cache[year] = cached
    return cached
|
||||
|
||||
|
||||
def parse_exceptional_closures(values):
    """
    Parse exceptional closures values.

    Each value is a whitespace-separated list of words: day words (a single
    date "DD/MM/YYYY" or a date range "DD/MM/YYYY-DD/MM/YYYY") and optional
    hour-period words ("9h-10h30"). An entry without any hour period means a
    whole-day closure.

    :param values: Iterable of raw closure strings
    :return: List of ``{"days": [datetime.date, ...],
        "hours_periods": [{"start": time, "stop": time}, ...]}`` dicts,
        one per input value
    :raises ValueError: on a malformed word, an inverted range, or a value
        containing no day at all
    """
    exceptional_closures = []
    for value in values:
        days = []
        hours_periods = []
        words = value.strip().split()
        for word in words:
            if not word:
                continue
            parts = word.split("-")
            if len(parts) == 1:
                # e.g. 31/02/2017: a single closure day
                ptime = time.strptime(word, date_format)
                date = datetime.date(ptime.tm_year, ptime.tm_mon, ptime.tm_mday)
                if date not in days:
                    days.append(date)
            elif len(parts) == 2:
                # e.g. 18/12/2017-20/12/2017 or 9h-10h30
                if date_pattern.match(parts[0]) and date_pattern.match(parts[1]):
                    # e.g. 18/12/2017-20/12/2017: an inclusive range of days
                    pstart = time.strptime(parts[0], date_format)
                    pstop = time.strptime(parts[1], date_format)
                    if pstop <= pstart:
                        raise ValueError(f"Day {parts[1]} <= {parts[0]}")

                    date = datetime.date(pstart.tm_year, pstart.tm_mon, pstart.tm_mday)
                    stop_date = datetime.date(pstop.tm_year, pstop.tm_mon, pstop.tm_mday)
                    while date <= stop_date:
                        if date not in days:
                            days.append(date)
                        date += datetime.timedelta(days=1)
                else:
                    # e.g. 9h-10h30: an hours period within the closure day(s)
                    mstart = time_pattern.match(parts[0])
                    mstop = time_pattern.match(parts[1])
                    if not mstart or not mstop:
                        raise ValueError(f'"{word}" is not a valid time period')
                    # Minutes are optional in the time pattern (e.g. "9h" == 9h00)
                    hstart = datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0))
                    hstop = datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0))
                    if hstop <= hstart:
                        raise ValueError(f"Time {parts[1]} <= {parts[0]}")
                    hours_periods.append({"start": hstart, "stop": hstop})
            else:
                raise ValueError(f'Invalid number of part in this word: "{word}"')
        if not days:
            raise ValueError(f'No days found in value "{value}"')
        exceptional_closures.append({"days": days, "hours_periods": hours_periods})
    return exceptional_closures
|
||||
|
||||
|
||||
def parse_normal_opening_hours(values):
    """
    Parse normal opening hours.

    Each value is a whitespace-separated list of words: week-day words (a
    single french day name, e.g. "jeudi", or a day range "lundi-jeudi") and
    hour-period words ("9h-10h30"). Days and hour periods may both be absent,
    but not simultaneously.

    :param values: Iterable of raw opening-hours strings
    :return: Sorted list of ``{"days": [day name, ...],
        "hours_periods": [{"start": time, "stop": time}, ...]}`` dicts,
        one per input value (hours periods sorted within each entry)
    :raises ValueError: on an unknown day name, a malformed or inverted
        range, or a value with neither days nor hour periods
    """
    normal_opening_hours = []
    for value in values:
        days = []
        hours_periods = []
        words = value.strip().split()
        for word in words:
            if not word:
                continue
            parts = word.split("-")
            if len(parts) == 1:
                # e.g. jeudi: a single week day
                if word not in week_days:
                    raise ValueError(f'"{word}" is not a valid week day')
                if word not in days:
                    days.append(word)
            elif len(parts) == 2:
                # e.g. lundi-jeudi or 9h-10h30
                if parts[0] in week_days and parts[1] in week_days:
                    # e.g. lundi-jeudi: an inclusive range of week days
                    if week_days.index(parts[1]) <= week_days.index(parts[0]):
                        raise ValueError(f'"{parts[1]}" is before "{parts[0]}"')
                    started = False
                    # Collect every week day from parts[0] through parts[1]
                    for d in week_days:
                        if not started and d != parts[0]:
                            continue
                        started = True
                        if d not in days:
                            days.append(d)
                        if d == parts[1]:
                            break
                else:
                    # e.g. 9h-10h30: an hours period
                    mstart = time_pattern.match(parts[0])
                    mstop = time_pattern.match(parts[1])
                    if not mstart or not mstop:
                        raise ValueError(f'"{word}" is not a valid time period')
                    # Minutes are optional in the time pattern (e.g. "9h" == 9h00)
                    hstart = datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0))
                    hstop = datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0))
                    if hstop <= hstart:
                        raise ValueError(f"Time {parts[1]} <= {parts[0]}")
                    hours_periods.append({"start": hstart, "stop": hstop})
            else:
                raise ValueError(f'Invalid number of part in this word: "{word}"')
        if not days and not hours_periods:
            raise ValueError(f'No days or hours period found in this value: "{value}"')
        normal_opening_hours.append({"days": days, "hours_periods": hours_periods})
    # Normalize: sort hour periods within each entry, then sort the entries
    for idx, noh in enumerate(normal_opening_hours):
        normal_opening_hours[idx]["hours_periods"] = sorted_hours_periods(noh["hours_periods"])
    return sorted_opening_hours(normal_opening_hours)
|
||||
|
||||
|
||||
def sorted_hours_periods(hours_periods):
    """Return the hours periods ordered by start time, then stop time."""

    def _period_key(period):
        return (period["start"], period["stop"])

    return sorted(hours_periods, key=_period_key)
|
||||
|
||||
|
||||
def sorted_opening_hours(opening_hours):
    """Sort opening hours entries.

    Entries are ordered by their first week day, then by the start and stop
    of their first hours period (day-less entries sort first, period-less
    entries use the extreme times of the day).
    """

    def _sort_key(oh):
        # Fix: use -1 (sorts before any real week-day index) instead of None
        # for day-less entries: None is not orderable against int in Python 3,
        # so mixing day-less and day-ful entries used to raise TypeError.
        return (
            week_days.index(oh["days"][0]) if oh["days"] else -1,
            oh["hours_periods"][0]["start"]
            if oh["hours_periods"]
            else datetime.datetime.min.time(),
            oh["hours_periods"][0]["stop"]
            if oh["hours_periods"]
            else datetime.datetime.max.time(),
        )

    return sorted(opening_hours, key=_sort_key)
|
||||
|
||||
|
||||
def its_nonworking_day(nonworking_public_holidays_values, date=None):
    """Tell whether *date* (default: today) is one of the configured
    non-working public holidays."""
    if not nonworking_public_holidays_values:
        return False
    if date is None:
        date = datetime.date.today()
    log.debug("its_nonworking_day(%s): values=%s", date, nonworking_public_holidays_values)
    nonworking_days = nonworking_french_public_days_of_the_year(year=date.year)
    for day_name in nonworking_public_holidays_values:
        if nonworking_days.get(day_name) == date:
            log.debug("its_nonworking_day(%s): %s", date, day_name)
            return True
    return False
|
||||
|
||||
|
||||
def its_exceptionally_closed(exceptional_closures_values, when=None, parse=True, all_day=False):
    """Tell whether it's exceptionally closed at *when* (default: now).

    :param exceptional_closures_values: Exceptional closures (raw strings when
        *parse* is True, already-parsed entries otherwise)
    :param when: The date/datetime to check (default: datetime.datetime.now())
    :param parse: If True, parse *exceptional_closures_values* first
    :param all_day: If True, only whole-day closures are considered
    """
    if not exceptional_closures_values:
        return False
    if when is None:
        when = datetime.datetime.now()
    assert isinstance(when, (datetime.date, datetime.datetime))
    when_date = when.date() if isinstance(when, datetime.datetime) else when
    if parse:
        exceptional_closures = parse_exceptional_closures(exceptional_closures_values)
    else:
        exceptional_closures = exceptional_closures_values
    log.debug("its_exceptionally_closed(%s): exceptional closures=%s", when, exceptional_closures)
    for closure in exceptional_closures:
        if when_date not in closure["days"]:
            log.debug(
                "its_exceptionally_closed(%s): %s not in days (%s)",
                when,
                when_date,
                closure["days"],
            )
            continue
        if not closure["hours_periods"]:
            # No hour period on a matching day means closed all day long
            return True
        if all_day:
            # Caller only cares about whole-day closures; skip partial ones
            continue
        # NOTE(review): *when* must be a datetime here for .time() to work
        # when hour periods are present — TODO confirm callers
        if any(
            period["start"] <= when.time() <= period["stop"]
            for period in closure["hours_periods"]
        ):
            return True
    return False
|
||||
|
||||
|
||||
def get_exceptional_closures_hours(exceptional_closures_values, date=None, parse=True):
    """Return the sorted list of exceptional closure hour periods applying on
    *date* (default: today)."""
    if not exceptional_closures_values:
        return []
    if date is None:
        date = datetime.date.today()
    if parse:
        exceptional_closures = parse_exceptional_closures(exceptional_closures_values)
    else:
        exceptional_closures = exceptional_closures_values
    log.debug(
        "get_exceptional_closures_hours(%s): exceptional closures=%s", date, exceptional_closures
    )
    exceptional_closures_hours = []
    for closure in exceptional_closures:
        if date not in closure["days"]:
            log.debug(
                "get_exceptional_closures_hours(%s): not in days (%s)", date, closure["days"]
            )
            continue
        if not closure["hours_periods"]:
            log.debug(
                "get_exceptional_closures_hours(%s): it's exceptionally closed all the day", date
            )
            # A whole-day closure shadows every other period of the day
            return [
                {
                    "start": datetime.datetime.min.time(),
                    "stop": datetime.datetime.max.time(),
                }
            ]
        exceptional_closures_hours.extend(closure["hours_periods"])
    log.debug(
        "get_exceptional_closures_hours(%s): exceptional closures hours=%s",
        date,
        exceptional_closures_hours,
    )
    return sorted_hours_periods(exceptional_closures_hours)
|
||||
|
||||
|
||||
def its_normally_open(normal_opening_hours_values, when=None, parse=True, ignore_time=False):
    """Tell whether a moment falls within the normal opening hours.

    :param normal_opening_hours_values: raw (or already parsed) normal opening hours
    :param when: the datetime to check (default: now)
    :param parse: when True, run the values through parse_normal_opening_hours() first
    :param ignore_time: when True, only match on the week day, not the time

    :return: True when normally open at that moment, False otherwise
    """
    if when is None:
        when = datetime.datetime.now()
    if not normal_opening_hours_values:
        # No configuration at all: considered always open
        log.debug(
            "its_normally_open(%s): no normal opening hours defined, consider as opened", when
        )
        return True
    weekday = week_days[when.timetuple().tm_wday]
    log.debug("its_normally_open(%s): week day=%s", when, weekday)
    opening_hours = (
        normal_opening_hours_values
        if not parse
        else parse_normal_opening_hours(normal_opening_hours_values)
    )
    log.debug("its_normally_open(%s): normal opening hours=%s", when, opening_hours)
    for entry in opening_hours:
        if entry["days"] and weekday not in entry["days"]:
            log.debug("its_normally_open(%s): %s not in days (%s)", when, weekday, entry["days"])
            continue
        # An entry without hour periods matches the whole day; when the
        # caller only cares about the day, any matching entry is enough.
        if not entry["hours_periods"] or ignore_time:
            return True
        for period in entry["hours_periods"]:
            if period["start"] <= when.time() <= period["stop"]:
                return True
    log.debug("its_normally_open(%s): not in normal opening hours", when)
    return False
|
||||
|
||||
|
||||
def its_opening_day(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    date=None,
    parse=True,
):
    """Tell whether a given day is an opening day.

    A day is an opening day when it is not a non-working public holiday,
    not covered by an all-day exceptional closure, and matches the normal
    opening days.

    :param date: the day to check (default: today)
    :param parse: forwarded to the underlying closure/opening-hours checks

    :return: True when it's an opening day, False otherwise
    """
    if date is None:
        date = datetime.date.today()
    # Non-working public holidays take precedence
    if its_nonworking_day(nonworking_public_holidays_values, date=date):
        return False
    # Then all-day exceptional closures
    if its_exceptionally_closed(exceptional_closures_values, when=date, all_day=True, parse=parse):
        return False
    # Finally, the normal opening days (time of day ignored)
    return its_normally_open(normal_opening_hours_values, when=date, parse=parse, ignore_time=True)
|
||||
|
||||
|
||||
def is_closed(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    exceptional_closure_on_nonworking_public_days=False,
    when=None,
    on_error="raise",
):
    """Compute the closure state at a given moment.

    :param exceptional_closure_on_nonworking_public_days: when True, a
        non-working public holiday is reported as an exceptional closure
    :param when: the datetime to check (default: now)
    :param on_error: behavior when opening hours / closures fail to parse:
        "raise" (default) re-raises the ValueError, "closed" or "opened"
        force the corresponding result instead

    :return: dict with boolean keys "closed", "exceptional_closure" and
             "exceptional_closure_all_day"
    """
    when = when if when else datetime.datetime.now()
    when_date = when.date()
    when_time = when.time()
    when_weekday = week_days[when.timetuple().tm_wday]

    # Fallback result used when parsing fails and on_error is not "raise"
    on_error_result = None
    if on_error in ("closed", "opened"):
        on_error_result = {
            "closed": on_error == "closed",
            "exceptional_closure": False,
            "exceptional_closure_all_day": False,
        }

    log.debug(
        "When = %s => date = %s / time = %s / week day = %s",
        when,
        when_date,
        when_time,
        when_weekday,
    )

    # Non-working public holidays take precedence over everything else
    if its_nonworking_day(nonworking_public_holidays_values, date=when_date):
        return {
            "closed": True,
            "exceptional_closure": exceptional_closure_on_nonworking_public_days,
            "exceptional_closure_all_day": exceptional_closure_on_nonworking_public_days,
        }

    # Then exceptional closures
    try:
        if its_exceptionally_closed(exceptional_closures_values, when=when):
            return {
                "closed": True,
                "exceptional_closure": True,
                "exceptional_closure_all_day": its_exceptionally_closed(
                    exceptional_closures_values, when=when, all_day=True
                ),
            }
    except ValueError as e:
        if on_error_result is None:
            log.error("Fail to parse exceptional closures", exc_info=True)
            raise e from e
        log.error("Fail to parse exceptional closures, consider as %s", on_error, exc_info=True)
        return on_error_result

    # Finally, the normal opening hours
    try:
        return {
            "closed": not its_normally_open(normal_opening_hours_values, when=when),
            "exceptional_closure": False,
            "exceptional_closure_all_day": False,
        }
    except ValueError as e:
        if on_error_result is None:
            log.error("Fail to parse normal opening hours", exc_info=True)
            raise e from e
        log.error("Fail to parse normal opening hours, consider as %s", on_error, exc_info=True)
        return on_error_result
|
||||
|
||||
|
||||
def next_opening_date(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    date=None,
    max_anaylse_days=None,
    parse=True,
):
    """Find the next opening day at or after a given date.

    :param date: the first day to consider (default: today)
    :param max_anaylse_days: how many extra days to analyse (default: 30)
    :param parse: when True, parse opening hours / closures up-front

    :return: the first opening datetime.date found, or False when none is
             found within the analysed window (or when parsing fails)
    """
    if date is None:
        date = datetime.date.today()
    if max_anaylse_days is None:
        max_anaylse_days = 30
    if parse:
        # Parse once up-front so the per-day checks can run with parse=False
        try:
            if normal_opening_hours_values:
                normal_opening_hours_values = parse_normal_opening_hours(
                    normal_opening_hours_values
                )
            else:
                normal_opening_hours_values = None
            if exceptional_closures_values:
                exceptional_closures_values = parse_exceptional_closures(
                    exceptional_closures_values
                )
            else:
                exceptional_closures_values = None
        except ValueError:
            log.error(
                "next_opening_date(%s): fail to parse normal opening hours or exceptional closures",
                date,
                exc_info=True,
            )
            return False
    for added_days in range(max_anaylse_days + 1):
        candidate = date + datetime.timedelta(days=added_days)
        if its_opening_day(
            normal_opening_hours_values=normal_opening_hours_values,
            exceptional_closures_values=exceptional_closures_values,
            nonworking_public_holidays_values=nonworking_public_holidays_values,
            date=candidate,
            parse=False,
        ):
            return candidate
    log.debug(
        "next_opening_date(%s): no opening day found in the next %d days", date, max_anaylse_days
    )
    return False
|
||||
|
||||
|
||||
def next_opening_hour(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    when=None,
    max_anaylse_days=None,
    parse=True,
):
    """Search for the next opening hour at or after a given moment.

    First finds the next opening *day* (next_opening_date), then walks that
    day's normal opening hours, trimming them against the day's exceptional
    closure periods, to compute the earliest reachable opening datetime. If
    the found day turns out to be entirely closed (or `when` is outside its
    opening hours), it recurses starting from the following day, within the
    remaining analysis window.

    :param when: the starting datetime (default: now)
    :param max_anaylse_days: how many days ahead to analyse (default: 30)
    :param parse: when True, parse opening hours / closures up-front

    :return: the next opening datetime.datetime, or False when none is found
             within the window (or when parsing fails)
    """
    when = when if when else datetime.datetime.now()
    max_anaylse_days = max_anaylse_days if max_anaylse_days is not None else 30
    if parse:
        # Parse once up-front so every nested call can run with parse=False
        try:
            normal_opening_hours_values = (
                parse_normal_opening_hours(normal_opening_hours_values)
                if normal_opening_hours_values
                else None
            )
            exceptional_closures_values = (
                parse_exceptional_closures(exceptional_closures_values)
                if exceptional_closures_values
                else None
            )
        except ValueError:  # pylint: disable=broad-except
            log.error(
                "next_opening_hour(%s): fail to parse normal opening hours or exceptional closures",
                when,
                exc_info=True,
            )
            return False
    # Locate the next candidate opening day
    date = next_opening_date(
        normal_opening_hours_values=normal_opening_hours_values,
        exceptional_closures_values=exceptional_closures_values,
        nonworking_public_holidays_values=nonworking_public_holidays_values,
        date=when.date(),
        max_anaylse_days=max_anaylse_days,
        parse=False,
    )
    if not date:
        log.debug(
            "next_opening_hour(%s): no opening day found in the next %d days",
            when,
            max_anaylse_days,
        )
        return False
    log.debug("next_opening_hour(%s): next opening date=%s", when, date)
    weekday = week_days[date.timetuple().tm_wday]
    log.debug("next_opening_hour(%s): next opening week day=%s", when, weekday)
    # Exceptional closure periods applying on the candidate day
    exceptional_closures_hours = get_exceptional_closures_hours(
        exceptional_closures_values, date=date, parse=False
    )
    log.debug(
        "next_opening_hour(%s): next opening day exceptional closures hours=%s",
        when,
        exceptional_closures_hours,
    )
    # Best (earliest) opening datetime found so far
    next_opening_datetime = None
    # Whether an opening period was discarded due to an exceptional closure
    exceptionally_closed = False
    exceptionally_closed_all_day = False
    # On a future day we are trivially "in" opening hours; on the current
    # day this is only set when `when` actually falls in an opening period
    in_opening_hours = date != when.date()
    for oh in normal_opening_hours_values:
        if exceptionally_closed_all_day:
            break

        if oh["days"] and weekday not in oh["days"]:
            log.debug("next_opening_hour(%s): %s not in days (%s)", when, weekday, oh["days"])
            continue

        log.debug(
            "next_opening_hour(%s): %s in days (%s), handle opening hours %s",
            when,
            weekday,
            oh["days"],
            oh["hours_periods"],
        )

        if not oh["hours_periods"]:
            # All-day opening entry: start from `when` (or midnight) and push
            # the candidate time past each consecutive closure period
            log.debug(
                "next_opening_hour(%s): %s is an all day opening day, handle exceptional closures "
                "hours %s to find the minimal opening time",
                when,
                weekday,
                exceptional_closures_hours,
            )
            if date == when.date():
                in_opening_hours = True
            test_time = when.time() if when.date() == date else datetime.datetime.min.time()
            for cl in exceptional_closures_hours:
                if cl["start"] <= test_time < cl["stop"]:
                    if cl["stop"] >= datetime.datetime.max.time():
                        # Closure runs to end of day: nothing left today
                        exceptionally_closed = True
                        exceptionally_closed_all_day = True
                        next_opening_datetime = None
                        break
                    test_time = cl["stop"]
                else:
                    # Closures are sorted: first non-covering period ends the scan
                    break
            if not exceptionally_closed_all_day:
                # Keep the earliest candidate across all entries
                candidate_next_opening_datetime = datetime.datetime.combine(date, test_time)
                next_opening_datetime = (
                    candidate_next_opening_datetime
                    if not next_opening_datetime
                    or candidate_next_opening_datetime < next_opening_datetime
                    else next_opening_datetime
                )
            continue

        log.debug(
            "next_opening_hour(%s): only opened during some hours periods (%s) on %s, find the "
            "minimal starting time",
            when,
            oh["hours_periods"],
            weekday,
        )
        # Sentinel: max time means "no usable start found yet"
        test_time = datetime.datetime.max.time()
        for hp in oh["hours_periods"]:
            if date == when.date() and hp["stop"] < when.time():
                log.debug(
                    "next_opening_hour(%s): ignore opening hours %s before specified when time %s",
                    when,
                    hp,
                    when.time(),
                )
                continue
            if date == when.date() and hp["start"] <= when.time() < hp["stop"]:
                in_opening_hours = True
            if exceptional_closures_hours:
                log.debug(
                    "next_opening_hour(%s): check if opening hours %s match with exceptional "
                    "closure hours %s",
                    when,
                    hp,
                    exceptional_closures_hours,
                )
                for cl in exceptional_closures_hours:
                    if cl["start"] <= hp["start"] and cl["stop"] >= hp["stop"]:
                        # Opening period fully covered by a closure: unusable
                        log.debug(
                            "next_opening_hour(%s): opening hour %s is included in exceptional "
                            "closure hours %s",
                            when,
                            hp,
                            cl,
                        )
                        exceptionally_closed = True
                        break
                    if hp["start"] < cl["start"]:
                        # Opening starts before the closure: its start is usable
                        log.debug(
                            "next_opening_hour(%s): opening hour %s start before closure hours %s",
                            when,
                            hp,
                            cl,
                        )
                        test_time = hp["start"] if hp["start"] < test_time else test_time
                    elif cl["stop"] >= hp["start"] and cl["stop"] < hp["stop"]:
                        # Closure truncates the beginning: usable from closure end
                        log.debug(
                            "next_opening_hour(%s): opening hour %s end after closure hours %s",
                            when,
                            hp,
                            cl,
                        )
                        test_time = cl["stop"] if cl["stop"] < test_time else test_time
            elif hp["start"] < test_time:
                # No closures at all: the opening period start is usable as-is
                log.debug(
                    "next_opening_hour(%s): no exceptional closure hours, use opening hours start "
                    "time %s",
                    when,
                    hp["start"],
                )
                test_time = hp["start"]

        if test_time < datetime.datetime.max.time():
            # Never propose a time earlier than `when` on the current day
            if date == when.date() and test_time < when.time():
                test_time = when.time()
            candidate_next_opening_datetime = datetime.datetime.combine(date, test_time)
            next_opening_datetime = (
                candidate_next_opening_datetime
                if not next_opening_datetime
                or candidate_next_opening_datetime < next_opening_datetime
                else next_opening_datetime
            )

    if not next_opening_datetime and (
        exceptionally_closed or (date == when.date() and not in_opening_hours)
    ):
        # Candidate day unusable: retry from the next day with the days
        # already consumed removed from the analysis window
        new_max_anaylse_days = max_anaylse_days - (date - when.date()).days
        if new_max_anaylse_days > 0:
            log.debug(
                "next_opening_hour(%s): exceptionally closed on %s, try on following %d days",
                when,
                date,
                new_max_anaylse_days,
            )
            next_opening_datetime = next_opening_hour(
                normal_opening_hours_values=normal_opening_hours_values,
                exceptional_closures_values=exceptional_closures_values,
                nonworking_public_holidays_values=nonworking_public_holidays_values,
                when=datetime.datetime.combine(
                    date + datetime.timedelta(days=1), datetime.datetime.min.time()
                ),
                max_anaylse_days=new_max_anaylse_days,
                parse=False,
            )
    if not next_opening_datetime:
        log.debug(
            "next_opening_hour(%s): no opening hours found in next %d days", when, max_anaylse_days
        )
        return False
    log.debug("next_opening_hour(%s): next opening hours=%s", when, next_opening_datetime)
    return next_opening_datetime
|
||||
|
||||
|
||||
def previous_opening_date(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    date=None,
    max_anaylse_days=None,
    parse=True,
):
    """Find the previous opening day at or before a given date.

    :param date: the first day to consider (default: today)
    :param max_anaylse_days: how many extra days back to analyse (default: 30)
    :param parse: when True, parse opening hours / closures up-front

    :return: the first opening datetime.date found going backward, or False
             when none is found within the analysed window (or when parsing
             fails)
    """
    if date is None:
        date = datetime.date.today()
    if max_anaylse_days is None:
        max_anaylse_days = 30
    if parse:
        # Parse once up-front so the per-day checks can run with parse=False
        try:
            if normal_opening_hours_values:
                normal_opening_hours_values = parse_normal_opening_hours(
                    normal_opening_hours_values
                )
            else:
                normal_opening_hours_values = None
            if exceptional_closures_values:
                exceptional_closures_values = parse_exceptional_closures(
                    exceptional_closures_values
                )
            else:
                exceptional_closures_values = None
        except ValueError:
            log.error(
                "previous_opening_date(%s): fail to parse normal opening hours or exceptional "
                "closures",
                date,
                exc_info=True,
            )
            return False
    for days_back in range(max_anaylse_days + 1):
        candidate = date - datetime.timedelta(days=days_back)
        if its_opening_day(
            normal_opening_hours_values=normal_opening_hours_values,
            exceptional_closures_values=exceptional_closures_values,
            nonworking_public_holidays_values=nonworking_public_holidays_values,
            date=candidate,
            parse=False,
        ):
            return candidate
    log.debug(
        "previous_opening_date(%s): no opening day found in the next %d days",
        date,
        max_anaylse_days,
    )
    return False
|
||||
|
||||
|
||||
def previous_opening_hour(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    when=None,
    max_anaylse_days=None,
    parse=True,
):
    """Search for the previous opening hour at or before a given moment.

    Mirror of next_opening_hour(): first finds the previous opening *day*
    (previous_opening_date), then walks that day's normal opening hours in
    reverse, trimming them against the day's exceptional closure periods, to
    compute the latest reachable opening datetime. If the found day turns out
    to be entirely closed (or `when` is outside its opening hours), it
    recurses from the day before, within the remaining analysis window.

    :param when: the starting datetime (default: now)
    :param max_anaylse_days: how many days back to analyse (default: 30)
    :param parse: when True, parse opening hours / closures up-front

    :return: the previous opening datetime.datetime, or False when none is
             found within the window (or when parsing fails)
    """
    when = when if when else datetime.datetime.now()
    max_anaylse_days = max_anaylse_days if max_anaylse_days is not None else 30
    if parse:
        # Parse once up-front so every nested call can run with parse=False
        try:
            normal_opening_hours_values = (
                parse_normal_opening_hours(normal_opening_hours_values)
                if normal_opening_hours_values
                else None
            )
            exceptional_closures_values = (
                parse_exceptional_closures(exceptional_closures_values)
                if exceptional_closures_values
                else None
            )
        except ValueError:  # pylint: disable=broad-except
            log.error(
                "previous_opening_hour(%s): fail to parse normal opening hours or exceptional "
                "closures",
                when,
                exc_info=True,
            )
            return False
    # Locate the previous candidate opening day
    date = previous_opening_date(
        normal_opening_hours_values=normal_opening_hours_values,
        exceptional_closures_values=exceptional_closures_values,
        nonworking_public_holidays_values=nonworking_public_holidays_values,
        date=when.date(),
        max_anaylse_days=max_anaylse_days,
        parse=False,
    )
    if not date:
        log.debug(
            "previous_opening_hour(%s): no opening day found in the previous %d days",
            when,
            max_anaylse_days,
        )
        return False
    log.debug("previous_opening_hour(%s): previous opening date=%s", when, date)
    weekday = week_days[date.timetuple().tm_wday]
    log.debug("previous_opening_hour(%s): previous opening week day=%s", when, weekday)
    # Exceptional closure periods applying on the candidate day
    exceptional_closures_hours = get_exceptional_closures_hours(
        exceptional_closures_values, date=date, parse=False
    )
    log.debug(
        "previous_opening_hour(%s): previous opening day exceptional closures hours=%s",
        when,
        exceptional_closures_hours,
    )
    # Best (latest) opening datetime found so far
    previous_opening_datetime = None
    # Whether an opening period was discarded due to an exceptional closure
    exceptionally_closed = False
    exceptionally_closed_all_day = False
    # On an earlier day we are trivially "in" opening hours; on the current
    # day this is only set when `when` actually falls in an opening period
    in_opening_hours = date != when.date()
    for oh in reversed(normal_opening_hours_values):
        if exceptionally_closed_all_day:
            break

        if oh["days"] and weekday not in oh["days"]:
            log.debug("previous_opening_hour(%s): %s not in days (%s)", when, weekday, oh["days"])
            continue

        log.debug(
            "previous_opening_hour(%s): %s in days (%s), handle opening hours %s",
            when,
            weekday,
            oh["days"],
            oh["hours_periods"],
        )

        if not oh["hours_periods"]:
            # All-day opening entry: start from `when` (or end of day) and
            # pull the candidate time back before each covering closure
            log.debug(
                "previous_opening_hour(%s): %s is an all day opening day, handle exceptional "
                "closures hours %s to find the maximal opening time",
                when,
                weekday,
                exceptional_closures_hours,
            )
            if date == when.date():
                in_opening_hours = True
            test_time = when.time() if when.date() == date else datetime.datetime.max.time()
            for cl in exceptional_closures_hours:
                if cl["start"] <= test_time < cl["stop"]:
                    if cl["start"] <= datetime.datetime.min.time():
                        # Closure starts at midnight: nothing left earlier today
                        exceptionally_closed = True
                        exceptionally_closed_all_day = True
                        previous_opening_datetime = None
                        break
                    test_time = cl["start"]
                else:
                    # First non-covering period ends the scan
                    break
            if not exceptionally_closed_all_day:
                # Keep the latest candidate across all entries
                candidate_previous_opening_datetime = datetime.datetime.combine(date, test_time)
                previous_opening_datetime = (
                    candidate_previous_opening_datetime
                    if not previous_opening_datetime
                    or candidate_previous_opening_datetime > previous_opening_datetime
                    else previous_opening_datetime
                )
            continue

        log.debug(
            "previous_opening_hour(%s): only opened during some hours periods (%s) on %s, find the "
            "maximal opening time",
            when,
            oh["hours_periods"],
            weekday,
        )
        # Sentinel: min time means "no usable stop found yet"
        test_time = datetime.datetime.min.time()
        for hp in reversed(oh["hours_periods"]):
            if date == when.date() and hp["start"] > when.time():
                log.debug(
                    "previous_opening_hour(%s): ignore opening hours %s starting before specified "
                    "when time %s",
                    when,
                    hp,
                    when.time(),
                )
                continue
            if date == when.date() and hp["start"] <= when.time() < hp["stop"]:
                in_opening_hours = True
            if exceptional_closures_hours:
                log.debug(
                    "previous_opening_hour(%s): check if opening hours %s match with exceptional "
                    "closure hours %s",
                    when,
                    hp,
                    exceptional_closures_hours,
                )
                for cl in reversed(exceptional_closures_hours):
                    if cl["start"] <= hp["start"] and cl["stop"] >= hp["stop"]:
                        # Opening period fully covered by a closure: unusable
                        log.debug(
                            "previous_opening_hour(%s): opening hour %s is included in exceptional "
                            "closure hours %s",
                            when,
                            hp,
                            cl,
                        )
                        exceptionally_closed = True
                        break
                    if cl["stop"] < hp["stop"]:
                        # Opening ends after the closure: its stop is usable
                        log.debug(
                            "previous_opening_hour(%s): opening hour %s end after closure hours %s",
                            when,
                            hp,
                            cl,
                        )
                        test_time = hp["stop"] if hp["stop"] > test_time else test_time
                    elif cl["start"] > hp["stop"]:
                        # Closure entirely after the opening period: stop is usable
                        log.debug(
                            "previous_opening_hour(%s): opening hour %s start before closure hours "
                            "%s",
                            when,
                            hp,
                            cl,
                        )
                        test_time = hp["stop"] if hp["stop"] > test_time else test_time
                    elif cl["stop"] >= hp["stop"] and cl["start"] > hp["start"]:
                        # Closure truncates the end: usable until closure start
                        log.debug(
                            "previous_opening_hour(%s): opening hour %s start before closure hours "
                            "%s",
                            when,
                            hp,
                            cl,
                        )
                        test_time = cl["start"] if cl["start"] > test_time else test_time
            elif hp["stop"] > test_time:
                # No closures at all: the opening period stop is usable as-is
                log.debug(
                    "previous_opening_hour(%s): no exceptional closure hours, use opening hours "
                    "stop time %s",
                    when,
                    hp["stop"],
                )
                test_time = hp["stop"]

        if test_time > datetime.datetime.min.time():
            # Never propose a time later than `when` on the current day
            if date == when.date() and test_time > when.time():
                test_time = when.time()
            candidate_previous_opening_datetime = datetime.datetime.combine(date, test_time)
            previous_opening_datetime = (
                candidate_previous_opening_datetime
                if not previous_opening_datetime
                or candidate_previous_opening_datetime > previous_opening_datetime
                else previous_opening_datetime
            )

    if not previous_opening_datetime and (
        exceptionally_closed or (date == when.date() and not in_opening_hours)
    ):
        # Candidate day unusable: retry from the day before with the days
        # already consumed removed from the analysis window
        new_max_anaylse_days = max_anaylse_days - (when.date() - date).days
        if new_max_anaylse_days > 0:
            log.debug(
                "previous_opening_hour(%s): exceptionally closed on %s, try on previous %d days",
                when,
                date,
                new_max_anaylse_days,
            )
            previous_opening_datetime = previous_opening_hour(
                normal_opening_hours_values=normal_opening_hours_values,
                exceptional_closures_values=exceptional_closures_values,
                nonworking_public_holidays_values=nonworking_public_holidays_values,
                when=datetime.datetime.combine(
                    date - datetime.timedelta(days=1), datetime.datetime.max.time()
                ),
                max_anaylse_days=new_max_anaylse_days,
                parse=False,
            )
    if not previous_opening_datetime:
        log.debug(
            "previous_opening_hour(%s): no opening hours found in previous %d days",
            when,
            max_anaylse_days,
        )
        return False
    log.debug(
        "previous_opening_hour(%s): previous opening hours=%s", when, previous_opening_datetime
    )
    return previous_opening_datetime
|
106
mylib/oracle.py
Normal file
106
mylib/oracle.py
Normal file
|
@ -0,0 +1,106 @@
|
|||
""" Oracle client """
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import cx_Oracle
|
||||
|
||||
from mylib.db import DB, DBFailToConnect
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OracleDB(DB):
    """Oracle client.

    Thin wrapper over cx_Oracle implementing the mylib.db.DB interface
    (connect / doSQL / doSelect / format_param).
    """

    # Oracle Data Source Name (connect string), set by __init__
    _dsn = None
    # Connection user name
    _user = None
    # Connection password
    _pwd = None

    def __init__(self, dsn, user, pwd, **kwargs):
        """Store connection settings; remaining kwargs go to the DB base class.

        :param dsn: Oracle DSN / connect string
        :param user: user name
        :param pwd: password
        """
        self._dsn = dsn
        self._user = user
        self._pwd = pwd
        super().__init__(**kwargs)

    def connect(self, exit_on_error=True):
        """Connect to Oracle server (no-op if already connected).

        :param exit_on_error: when True (default), exit the process on
            connection failure; otherwise raise DBFailToConnect

        :return: True (only reached on success)
        """
        if self._conn is None:
            log.info("Connect on Oracle server with DSN %s as %s", self._dsn, self._user)
            try:
                self._conn = cx_Oracle.connect(user=self._user, password=self._pwd, dsn=self._dsn)
            except cx_Oracle.Error as err:
                log.fatal(
                    "An error occurred during Oracle database connection (%s@%s).",
                    self._user,
                    self._dsn,
                    exc_info=1,
                )
                if exit_on_error:
                    sys.exit(1)
                else:
                    raise DBFailToConnect(f"{self._user}@{self._dsn}") from err
        return True

    def doSQL(self, sql, params=None):
        """
        Run SQL query and commit changes (rollback on error)

        :param sql: The SQL query
        :param params: The SQL query's parameters as dict (optional)

        :return: True on success, False otherwise
        :rtype: bool
        """
        # In just-try mode, log the query but never touch the database
        if self.just_try:
            log.debug("Just-try mode : do not really execute SQL query '%s'", sql)
            return True

        try:
            self._log_query(sql, params)
            with self._conn.cursor() as cursor:
                # cx_Oracle takes named bind parameters as keyword arguments
                if isinstance(params, dict):
                    cursor.execute(sql, **params)
                else:
                    cursor.execute(sql)
                self._conn.commit()
                return True
        except cx_Oracle.Error:
            self._log_query_exception(sql, params)
            # Undo any partial change from the failed statement
            self._conn.rollback()
            return False

    def doSelect(self, sql, params=None):
        """
        Run SELECT SQL query and return list of selected rows as dict

        :param sql: The SQL query
        :param params: The SQL query's parameters as dict (optional)

        :return: List of selected rows as dict on success, False otherwise
        :rtype: list, bool
        """
        try:
            self._log_query(sql, params)
            with self._conn.cursor() as cursor:
                if isinstance(params, dict):
                    cursor.execute(sql, **params)
                else:
                    cursor.execute(sql)
                # Map each row tuple to a dict keyed by column name
                cursor.rowfactory = lambda *args: dict(
                    zip([d[0] for d in cursor.description], args)
                )
                results = cursor.fetchall()
                return results
        except cx_Oracle.Error:
            self._log_query_exception(sql, params)
            return False

    #
    # SQL helpers
    #

    @staticmethod
    def format_param(param):
        """Format SQL query parameter for prepared query (Oracle ":name" style)"""
        return f":{param}"
|
52
mylib/pbar.py
Normal file
52
mylib/pbar.py
Normal file
|
@ -0,0 +1,52 @@
|
|||
""" Progress bar """
|
||||
|
||||
import logging
|
||||
|
||||
import progressbar
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Pbar:  # pylint: disable=useless-object-inheritance
    """
    Progress bar

    This class abstract a progress bar that could be enable/disable by
    configuration/script parameters.
    """

    # Underlying progressbar.ProgressBar instance (None when disabled)
    __pbar = None
    # Current progress counter (None when disabled)
    __count = None

    def __init__(self, name, maxval, enabled=True):
        if not (enabled and maxval):
            # Disabled (or nothing to count): just log the name once
            log.info(name)
            return
        self.__count = 0
        widgets = [
            name + ": ",
            progressbar.Percentage(),
            " ",
            progressbar.Bar(),
            " ",
            progressbar.SimpleProgress(),
            progressbar.ETA(),
        ]
        self.__pbar = progressbar.ProgressBar(widgets=widgets, maxval=maxval).start()

    def increment(self, step=None):
        """
        Increment the progress bar

        :param step: The step (optional, default: 1)
        """
        if not self.__pbar:
            return
        self.__count += step if step else 1
        self.__pbar.update(self.__count)

    def finish(self):
        """Finish the progress bar"""
        if self.__pbar:
            self.__pbar.finish()
|
169
mylib/pgsql.py
Normal file
169
mylib/pgsql.py
Normal file
|
@ -0,0 +1,169 @@
|
|||
""" PostgreSQL client """
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import psycopg2
|
||||
from psycopg2.extras import RealDictCursor
|
||||
|
||||
from mylib.db import DB, DBFailToConnect
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PgDB(DB):
    """PostgreSQL client.

    Thin wrapper over psycopg2 implementing the mylib.db.DB interface
    (connect / close / doSQL / doSelect) plus some deprecated value-quoting
    helpers.
    """

    # Connection settings, set by __init__
    _host = None
    _user = None
    _pwd = None
    _db = None

    # strftime formats used by the deprecated quoting helpers below
    date_format = "%Y-%m-%d"
    datetime_format = "%Y-%m-%d %H:%M:%S"

    def __init__(self, host, user, pwd, db, **kwargs):
        """Store connection settings; remaining kwargs go to the DB base class.

        :param host: PostgreSQL server host
        :param user: user name
        :param pwd: password
        :param db: database name
        """
        self._host = host
        self._user = user
        self._pwd = pwd
        self._db = db
        super().__init__(**kwargs)

    def connect(self, exit_on_error=True):
        """Connect to PostgreSQL server (no-op if already connected).

        :param exit_on_error: when True (default), exit the process on
            connection failure; otherwise raise DBFailToConnect

        :return: True (only reached on success)
        """
        if self._conn is None:
            try:
                log.info(
                    "Connect on PostgreSQL server %s as %s on database %s",
                    self._host,
                    self._user,
                    self._db,
                )
                self._conn = psycopg2.connect(
                    dbname=self._db, user=self._user, host=self._host, password=self._pwd
                )
            except psycopg2.Error as err:
                log.fatal(
                    "An error occurred during Postgresql database connection (%s@%s, database=%s).",
                    self._user,
                    self._host,
                    self._db,
                    exc_info=1,
                )
                if exit_on_error:
                    sys.exit(1)
                else:
                    raise DBFailToConnect(f"{self._user}@{self._host}:{self._db}") from err
        return True

    def close(self):
        """Close connection with PostgreSQL server (if opened)"""
        if self._conn:
            self._conn.close()
            self._conn = None

    def setEncoding(self, enc):
        """Set connection encoding.

        :param enc: the client encoding name

        :return: True on success, False when not connected or on error
        """
        if self._conn:
            try:
                self._conn.set_client_encoding(enc)
                return True
            except psycopg2.Error:
                log.error(
                    'An error occurred setting Postgresql database connection encoding to "%s"',
                    enc,
                    exc_info=1,
                )
        return False

    def doSQL(self, sql, params=None):
        """
        Run SQL query and commit changes (rollback on error)

        :param sql: The SQL query
        :param params: The SQL query's parameters as dict (optional)

        :return: True on success, False otherwise
        :rtype: bool
        """
        # In just-try mode, log the query but never touch the database
        if self.just_try:
            log.debug("Just-try mode : do not really execute SQL query '%s'", sql)
            return True

        cursor = self._conn.cursor()
        try:
            self._log_query(sql, params)
            if params is None:
                cursor.execute(sql)
            else:
                cursor.execute(sql, params)
            self._conn.commit()
            return True
        except psycopg2.Error:
            self._log_query_exception(sql, params)
            # Undo any partial change from the failed statement
            self._conn.rollback()
            return False

    def doSelect(self, sql, params=None):
        """
        Run SELECT SQL query and return list of selected rows as dict

        :param sql: The SQL query
        :param params: The SQL query's parameters as dict (optional)

        :return: List of selected rows as dict on success, False otherwise
        :rtype: list, bool
        """
        # RealDictCursor makes fetchall() return dict-like rows keyed by column
        cursor = self._conn.cursor(cursor_factory=RealDictCursor)
        try:
            self._log_query(sql, params)
            cursor.execute(sql, params)
            results = cursor.fetchall()
            # Convert RealDictRow objects to plain dicts
            return list(map(dict, results))
        except psycopg2.Error:
            self._log_query_exception(sql, params)
            return False

    #
    # Deprecated helpers
    #

    @classmethod
    def _quote_value(cls, value):
        """Quote a value for SQL query (deprecated: prefer parameterized queries)"""
        if value is None:
            return "NULL"

        if isinstance(value, (int, float)):
            return str(value)

        # Normalize date/datetime values to their string representation
        # (check datetime first: datetime is a subclass of date)
        if isinstance(value, datetime.datetime):
            value = cls._format_datetime(value)
        elif isinstance(value, datetime.date):
            value = cls._format_date(value)

        # Escape single quotes by doubling them (standard SQL quoting)
        # pylint: disable=consider-using-f-string
        return "'{}'".format(value.replace("'", "''"))

    @classmethod
    def _format_datetime(cls, value):
        """Format datetime object as string"""
        assert isinstance(value, datetime.datetime)
        return value.strftime(cls.datetime_format)

    @classmethod
    def _format_date(cls, value):
        """Format date object as string"""
        assert isinstance(value, (datetime.date, datetime.datetime))
        return value.strftime(cls.date_format)

    @classmethod
    def time2datetime(cls, time):
        """Convert timestamp to datetime string"""
        return cls._format_datetime(datetime.datetime.fromtimestamp(int(time)))

    @classmethod
    def time2date(cls, time):
        """Convert timestamp to date string"""
        return cls._format_date(datetime.date.fromtimestamp(int(time)))
|
155
mylib/report.py
Normal file
155
mylib/report.py
Normal file
|
@ -0,0 +1,155 @@
|
|||
""" Report """
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
|
||||
from mylib.config import ConfigurableObject, StringOption
|
||||
from mylib.email import EmailClient
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Report(ConfigurableObject):  # pylint: disable=useless-object-inheritance
    """Logging report sent by email.

    Collects log records through a logging.StreamHandler writing into this
    object, plus optional file/payload attachments, and sends everything as
    one email via an EmailClient — optionally at interpreter exit.
    """

    _config_name = "report"
    _config_comment = "Email report"
    _defaults = {
        "recipient": None,
        "subject": "Report",
        "loglevel": "WARNING",
        "logformat": "%(asctime)s - %(levelname)s - %(message)s",
        "just_try": False,
    }

    # Class-level defaults; real values are assigned per-instance in
    # __init__() / initialize().
    content = []
    handler = None
    formatter = None
    email_client = None

    def __init__(
        self,
        email_client=None,
        add_logging_handler=False,
        send_at_exit=None,
        initialize=True,
        **kwargs,
    ):
        """
        :param email_client: Optional EmailClient used to send the report
        :param add_logging_handler: If True, register the report handler on the root logger
        :param send_at_exit: If truthy, register the report to be sent at interpreter exit
        :param initialize: If True, run initialize() immediately
        :param kwargs: Forwarded to ConfigurableObject.__init__
        """
        super().__init__(**kwargs)
        self.email_client = email_client
        self.add_logging_handler = add_logging_handler
        self._send_at_exit = send_at_exit
        # BUG FIX: make the report content an instance attribute. It was only
        # declared as a mutable class attribute, so every Report instance
        # shared (and appended to) the same list.
        self.content = []
        self._attachment_files = []
        self._attachment_payloads = []

        if initialize:
            self.initialize()

    def configure(self, **kwargs):  # pylint: disable=arguments-differ
        """Configure options on registered mylib.Config object"""
        section = super().configure(
            just_try_help=kwargs.pop("just_try_help", "Just-try mode: do not really send report"),
            **kwargs,
        )

        section.add_option(StringOption, "recipient", comment="Report recipient email address")
        section.add_option(
            StringOption,
            "subject",
            default=self._defaults["subject"],
            comment="Report email subject",
        )
        section.add_option(
            StringOption,
            "loglevel",
            default=self._defaults["loglevel"],
            comment='Report log level (as accept by python logging, for instance "INFO")',
        )
        section.add_option(
            StringOption,
            "logformat",
            default=self._defaults["logformat"],
            # BUG FIX: this comment was a copy-paste of the loglevel one
            comment="Report log format (as accept by python logging)",
        )

        # Lazily create and configure an email client if none was provided
        if not self.email_client:
            self.email_client = EmailClient(config=self._config)
            self.email_client.configure()

        return section

    def initialize(self, loaded_config=None):
        """Configuration initialized hook"""
        super().initialize(loaded_config=loaded_config)
        # The handler streams into this object: write() accumulates records
        self.handler = logging.StreamHandler(self)

        loglevel = self._get_option("loglevel").upper()
        assert hasattr(logging, loglevel), f"Invalid report loglevel {loglevel}"
        self.handler.setLevel(getattr(logging, loglevel))

        self.formatter = logging.Formatter(self._get_option("logformat"))
        self.handler.setFormatter(self.formatter)

        if self.add_logging_handler:
            logging.getLogger().addHandler(self.handler)
        if self._send_at_exit:
            self.send_at_exit()

    def get_handler(self):
        """Retrieve logging handler"""
        return self.handler

    def write(self, msg):
        """Write a message (file-like API used by the logging StreamHandler)"""
        self.content.append(msg)

    def get_content(self):
        """Read the report content"""
        return "".join(self.content)

    def add_attachment_file(self, filepath):
        """Add attachment file"""
        self._attachment_files.append(filepath)

    def add_attachment_payload(self, payload):
        """Add attachment payload"""
        self._attachment_payloads.append(payload)

    def send(self, subject=None, rcpt_to=None, email_client=None, just_try=None):
        """Send report using an EmailClient

        :param subject: Optional email subject (default: configured subject)
        :param rcpt_to: Optional recipient (default: configured recipient)
        :param email_client: Optional EmailClient (default: the one from __init__/configure)
        :param just_try: Optional just-try mode override

        :return: True on success or when there is nothing to send, False otherwise
        """
        if rcpt_to is None:
            rcpt_to = self._get_option("recipient")
        if not rcpt_to:
            log.debug("No report recipient, do not send report")
            return True
        if subject is None:
            subject = self._get_option("subject")
        assert subject, "You must provide report subject using Report.__init__ or Report.send"
        if email_client is None:
            email_client = self.email_client
        assert email_client, (
            "You must provide an email client __init__(), send() or send_at_exit() methods argument"
            " email_client"
        )
        content = self.get_content()
        # An empty report with no attachments is silently skipped
        if not content and not self._attachment_files and not self._attachment_payloads:
            log.debug("Report is empty, do not send it")
            return True
        msg = email_client.forge_message(
            rcpt_to,
            subject=subject,
            text_body=content,
            attachment_files=self._attachment_files,
            attachment_payloads=self._attachment_payloads,
        )
        if email_client.send(
            rcpt_to, msg=msg, just_try=just_try if just_try is not None else self._just_try
        ):
            log.debug("Report sent to %s", rcpt_to)
            return True
        log.error("Fail to send report to %s", rcpt_to)
        return False

    def send_at_exit(self, **kwargs):
        """Register the report to be sent at interpreter exit"""
        atexit.register(self.send, **kwargs)
|
0
mylib/scripts/__init__.py
Normal file
0
mylib/scripts/__init__.py
Normal file
62
mylib/scripts/email_templates/header.svg
Normal file
62
mylib/scripts/email_templates/header.svg
Normal file
|
@ -0,0 +1,62 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
width="92.738403mm"
|
||||
height="17.141003mm"
|
||||
viewBox="0 0 92.738403 17.141003"
|
||||
version="1.1"
|
||||
id="svg5"
|
||||
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
|
||||
sodipodi:docname="header.svg"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview7"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
inkscape:document-units="mm"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.84096521"
|
||||
inkscape:cx="92.156012"
|
||||
inkscape:cy="315.11411"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1011"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="32"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="layer1" />
|
||||
<defs
|
||||
id="defs2" />
|
||||
<g
|
||||
inkscape:label="Calque 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(-44.730637,-68.858589)">
|
||||
<rect
|
||||
style="fill:none;stroke:#004787;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect234"
|
||||
width="90.738403"
|
||||
height="15.141003"
|
||||
x="45.730637"
|
||||
y="69.858589" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:5.29167px;line-height:125%;font-family:'DejaVu Sans Mono';-inkscape-font-specification:'DejaVu Sans Mono';letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="82.664459"
|
||||
y="79.623909"
|
||||
id="text2144"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan2142"
|
||||
style="fill:#004787;fill-opacity:1;stroke-width:0.264583px"
|
||||
x="82.664459"
|
||||
y="79.623909">Header</tspan></text>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 2.2 KiB |
2
mylib/scripts/email_templates/test.html
Normal file
2
mylib/scripts/email_templates/test.html
Normal file
|
@ -0,0 +1,2 @@
|
|||
<img src="${load_image_as_base64('header.svg')}" style="display: block" />
|
||||
<p><strong>Just a test email.</strong> <small>(sent at ${sent_date})</small></p>
|
1
mylib/scripts/email_templates/test.subject
Normal file
1
mylib/scripts/email_templates/test.subject
Normal file
|
@ -0,0 +1 @@
|
|||
Test email
|
1
mylib/scripts/email_templates/test.txt
Normal file
1
mylib/scripts/email_templates/test.txt
Normal file
|
@ -0,0 +1 @@
|
|||
Just a test email sent at ${sent_date}.
|
93
mylib/scripts/email_test.py
Normal file
93
mylib/scripts/email_test.py
Normal file
|
@ -0,0 +1,93 @@
|
|||
""" Test Email client """
|
||||
import datetime
|
||||
import getpass
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from mylib.scripts.helpers import add_email_opts, get_opts_parser, init_email_client, init_logging
|
||||
|
||||
log = logging.getLogger("mylib.scripts.email_test")
|
||||
|
||||
|
||||
def main(argv=None):  # pylint: disable=too-many-locals,too-many-statements
    """Script main: send a test email from command-line options."""
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    parser = get_opts_parser(just_try=True)
    add_email_opts(
        parser,
        templates_path=os.path.join(os.path.dirname(os.path.realpath(__file__)), "email_templates"),
    )

    test_opts = parser.add_argument_group("Test email options")

    test_opts.add_argument(
        "-t",
        "--to",
        action="store",
        type=str,
        dest="test_to",
        help="Test email recipient(s)",
        nargs="+",
    )

    # BUG FIX: was action="store_true", which turned -T/--template into a
    # boolean flag and made it impossible to actually pass a template name.
    test_opts.add_argument(
        "-T",
        "--template",
        action="store",
        type=str,
        dest="template",
        help="Template name to send (default: test)",
        default="test",
    )

    test_opts.add_argument(
        "--cc",
        action="store",
        type=str,
        dest="test_cc",
        help="Test CC email recipient(s)",
        nargs="+",
    )

    test_opts.add_argument(
        "--bcc",
        action="store",
        type=str,
        dest="test_bcc",
        help="Test BCC email recipient(s)",
        nargs="+",
    )

    # BUG FIX: argv was accepted but ignored (parse_args() always read
    # sys.argv); forward it so main() is callable programmatically.
    options = parser.parse_args(argv)

    if not options.test_to:
        parser.error("You must specify at least one test email recipient using -t/--to parameter")
        sys.exit(1)

    # Initialize logs
    init_logging(options, "Test EmailClient")

    if options.email_smtp_user and not options.email_smtp_password:
        options.email_smtp_password = getpass.getpass("Please enter SMTP password: ")

    email_client = init_email_client(options)

    log.info(
        "Send a test email to %s (CC: %s / BCC: %s)",
        ", ".join(options.test_to),
        ", ".join(options.test_cc) if options.test_cc else None,
        ", ".join(options.test_bcc) if options.test_bcc else None,
    )
    if email_client.send(
        options.test_to,
        cc=options.test_cc,
        bcc=options.test_bcc,
        template=options.template,
        sent_date=datetime.datetime.now(),
    ):
        log.info("Test email sent")
        sys.exit(0)
    log.error("Fail to send test email")
    sys.exit(1)
|
93
mylib/scripts/email_test_with_config.py
Normal file
93
mylib/scripts/email_test_with_config.py
Normal file
|
@ -0,0 +1,93 @@
|
|||
""" Test Email client using mylib.config.Config for configuration """
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from mylib.config import Config
|
||||
from mylib.email import EmailClient
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main(argv=None):  # pylint: disable=too-many-locals,too-many-statements
    """Script main: send a test email, configured through mylib.config.Config."""
    if argv is None:
        argv = sys.argv[1:]
    # NOTE(review): argv is not forwarded to config.parse_arguments_options()
    # below; confirm whether Config supports passing argv explicitly.

    config = Config(__doc__, __name__.replace(".", "_"))

    email_client = EmailClient(config=config)
    email_client.set_default(
        "templates_path",
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "email_templates"),
    )
    email_client.configure(just_try=True)

    # Options parser
    parser = config.get_arguments_parser(description=__doc__)

    test_opts = parser.add_argument_group("Test email options")

    test_opts.add_argument(
        "-t",
        "--to",
        action="store",
        type=str,
        dest="test_to",
        help="Test email recipient(s)",
        nargs="+",
    )

    # BUG FIX: was action="store_true", which turned -T/--template into a
    # boolean flag and made it impossible to actually pass a template name.
    test_opts.add_argument(
        "-T",
        "--template",
        action="store",
        type=str,
        dest="template",
        help="Template name to send (default: test)",
        default="test",
    )

    test_opts.add_argument(
        "-m",
        "--mako",
        action="store_true",
        dest="test_mako",
        help="Test mako templating",
    )

    test_opts.add_argument(
        "--cc",
        action="store",
        type=str,
        dest="test_cc",
        help="Test CC email recipient(s)",
        nargs="+",
    )

    test_opts.add_argument(
        "--bcc",
        action="store",
        type=str,
        dest="test_bcc",
        help="Test BCC email recipient(s)",
        nargs="+",
    )

    options = config.parse_arguments_options()

    if not options.test_to:
        parser.error("You must specify at least one test email recipient using -t/--to parameter")
        sys.exit(1)

    if email_client.send(
        options.test_to,
        cc=options.test_cc,
        bcc=options.test_bcc,
        # BUG FIX: honor the -T/--template option instead of hard-coding "test"
        template=options.template,
        sent_date=datetime.datetime.now(),
    ):
        # Consistency: use the module logger, as the sibling scripts do
        log.info("Test email sent")
        sys.exit(0)
    log.error("Fail to send test email")
    sys.exit(1)
|
300
mylib/scripts/helpers.py
Normal file
300
mylib/scripts/helpers.py
Normal file
|
@ -0,0 +1,300 @@
|
|||
""" Scripts helpers """
|
||||
|
||||
import argparse
|
||||
import getpass
|
||||
import logging
|
||||
import os.path
|
||||
import socket
|
||||
import sys
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def init_logging(options, name, report=None):
    """Configure the logging module from parsed script options.

    :param options: Parsed options (uses .debug, .verbose, .logfile, .console)
    :param name: Script name embedded in the log format
    :param report: Optional Report object whose handler is also registered
    """
    if options.debug:
        level = logging.DEBUG
    elif options.verbose:
        level = logging.INFO
    else:
        level = logging.WARNING

    handlers = []
    if options.logfile:
        handlers.append(logging.FileHandler(options.logfile))
    # Log to the console when no log file is configured, or when explicitly
    # requested in addition to the log file.
    if options.console or not options.logfile:
        handlers.append(logging.StreamHandler())
    if report:
        handlers.append(report.get_handler())

    logging.basicConfig(
        level=level,
        format=f"%(asctime)s - {name} - %(levelname)s - %(message)s",
        handlers=handlers,
    )
|
||||
|
||||
|
||||
def get_default_opt_value(config, default_config, key):
    """Return the default value for *key*, preferring *config* over *default_config*."""
    has_override = bool(config) and key in config
    return config[key] if has_override else default_config.get(key)
|
||||
|
||||
|
||||
def get_opts_parser(
    desc=None, just_try=False, just_one=False, progress=False, config=None, **kwargs
):
    """Build the common script options parser.

    :param desc: Parser description
    :param just_try: If True, add the -j/--just-try flag
    :param just_one: If True, add the -J/--just-one flag
    :param progress: If True, add the -p/--progress flag
    :param config: Optional dict overriding built-in option defaults
    :param kwargs: Forwarded to argparse.ArgumentParser

    :return: The configured argparse.ArgumentParser
    """
    default_config = {"logfile": None}

    parser = argparse.ArgumentParser(description=desc, **kwargs)

    parser.add_argument(
        "-v", "--verbose", action="store_true", dest="verbose", help="Enable verbose mode"
    )
    parser.add_argument(
        "-d", "--debug", action="store_true", dest="debug", help="Enable debug mode"
    )

    logfile_default = get_default_opt_value(config, default_config, "logfile")
    parser.add_argument(
        "-l",
        "--log-file",
        action="store",
        type=str,
        dest="logfile",
        help=f"Log file path (default: {logfile_default})",
        default=logfile_default,
    )

    parser.add_argument(
        "-C",
        "--console",
        action="store_true",
        dest="console",
        help="Always log on console (even if log file is configured)",
    )

    # Optional mode flags, added only when the calling script asks for them
    optional_flags = (
        (just_try, ("-j", "--just-try"), "just_try", "Enable just-try mode"),
        (just_one, ("-J", "--just-one"), "just_one", "Enable just-one mode"),
        (progress, ("-p", "--progress"), "progress", "Enable progress bar"),
    )
    for enabled, flags, dest, help_msg in optional_flags:
        if enabled:
            parser.add_argument(*flags, action="store_true", dest=dest, help=help_msg)

    return parser
|
||||
|
||||
|
||||
def add_email_opts(parser, config=None, **defaults):
    """Add the email options group to an argparse parser.

    :param parser: The argparse.ArgumentParser to extend
    :param config: Optional dict overriding built-in option defaults
    :param defaults: Extra keyword overrides for the built-in defaults
    """
    email_opts = parser.add_argument_group("Email options")

    default_config = {
        "smtp_host": "127.0.0.1",
        "smtp_port": 25,
        "smtp_ssl": False,
        "smtp_tls": False,
        "smtp_user": None,
        "smtp_password": None,
        "smtp_debug": False,
        "email_encoding": sys.getdefaultencoding(),
        "sender_name": getpass.getuser(),
        "sender_email": f"{getpass.getuser()}@{socket.gethostname()}",
        "catch_all": False,
        "templates_path": None,
    }
    default_config.update(defaults)

    # (flag, dest, action, value_type, default_key, help_prefix)
    option_specs = [
        ("--smtp-host", "email_smtp_host", "store", str, "smtp_host", "SMTP host"),
        ("--smtp-port", "email_smtp_port", "store", int, "smtp_port", "SMTP port"),
        ("--smtp-ssl", "email_smtp_ssl", "store_true", None, "smtp_ssl", "Use SSL"),
        ("--smtp-tls", "email_smtp_tls", "store_true", None, "smtp_tls", "Use TLS"),
        ("--smtp-user", "email_smtp_user", "store", str, "smtp_user", "SMTP username"),
        ("--smtp-password", "email_smtp_password", "store", str, "smtp_password", "SMTP password"),
        (
            "--smtp-debug",
            "email_smtp_debug",
            "store_true",
            None,
            "smtp_debug",
            "Debug SMTP connection",
        ),
        ("--email-encoding", "email_encoding", "store", str, "email_encoding", "SMTP encoding"),
        ("--sender-name", "email_sender_name", "store", str, "sender_name", "Sender name"),
        ("--sender-email", "email_sender_email", "store", str, "sender_email", "Sender email"),
        (
            "--catch-all",
            "email_catch_all",
            "store",
            str,
            "catch_all",
            "Catch all sent email: specify catch recipient email address",
        ),
        (
            "--templates-path",
            "email_templates_path",
            "store",
            str,
            "templates_path",
            "Load templates from specify directory",
        ),
    ]

    for flag, dest, action, value_type, key, help_prefix in option_specs:
        default = get_default_opt_value(config, default_config, key)
        arg_kwargs = {
            "action": action,
            "dest": dest,
            "help": f"{help_prefix} (default: {default})",
            "default": default,
        }
        if value_type is not None:
            arg_kwargs["type"] = value_type
        email_opts.add_argument(flag, **arg_kwargs)
|
||||
|
||||
|
||||
def init_email_client(options, **kwargs):
    """Initialize email client from calling script options

    :param options: Parsed script options, forwarded to EmailClient
    :param kwargs: Extra keyword arguments forwarded to EmailClient

    :return: An initialized mylib.email.EmailClient
    """
    # Imported lazily so scripts that never send email do not pay for
    # (or require) mylib.email at module import time.
    from mylib.email import EmailClient  # pylint: disable=import-outside-toplevel

    log.info("Initialize Email client")
    return EmailClient(options=options, initialize=True, **kwargs)
|
||||
|
||||
|
||||
def add_sftp_opts(parser):
    """Add the SFTP options group to an argparse.ArgumentParser.

    :param parser: The parser to extend
    :return: The created "SFTP options" argument group
    """
    sftp_opts = parser.add_argument_group("SFTP options")

    # (flags, add_argument keyword arguments)
    option_specs = [
        (
            ("-H", "--sftp-host"),
            {
                "action": "store",
                "type": str,
                "dest": "sftp_host",
                "help": "SFTP Host (default: localhost)",
                "default": "localhost",
            },
        ),
        (
            ("--sftp-port",),
            {
                "action": "store",
                "type": int,
                "dest": "sftp_port",
                "help": "SFTP Port (default: 22)",
                "default": 22,
            },
        ),
        (
            ("-u", "--sftp-user"),
            {"action": "store", "type": str, "dest": "sftp_user", "help": "SFTP User"},
        ),
        (
            ("-P", "--sftp-password"),
            {"action": "store", "type": str, "dest": "sftp_password", "help": "SFTP Password"},
        ),
        (
            ("--sftp-known-hosts",),
            {
                "action": "store",
                "type": str,
                "dest": "sftp_known_hosts",
                "help": "SFTP known_hosts file path (default: ~/.ssh/known_hosts)",
                "default": os.path.expanduser("~/.ssh/known_hosts"),
            },
        ),
        (
            ("--sftp-auto-add-unknown-host-key",),
            {
                "action": "store_true",
                "dest": "sftp_auto_add_unknown_host_key",
                "help": "Auto-add unknown SSH host key",
            },
        ),
    ]

    for flags, arg_kwargs in option_specs:
        sftp_opts.add_argument(*flags, **arg_kwargs)

    return sftp_opts
|
142
mylib/scripts/ldap_test.py
Normal file
142
mylib/scripts/ldap_test.py
Normal file
|
@ -0,0 +1,142 @@
|
|||
""" Test LDAP """
|
||||
import datetime
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import dateutil.tz
|
||||
import pytz
|
||||
|
||||
from mylib.ldap import format_date, format_datetime, parse_date, parse_datetime
|
||||
from mylib.scripts.helpers import get_opts_parser, init_logging
|
||||
|
||||
log = logging.getLogger("mylib.scripts.ldap_test")
|
||||
|
||||
|
||||
def main(argv=None):  # pylint: disable=too-many-locals,too-many-statements
    """Script main: exercise mylib.ldap date/datetime helpers and print results."""
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    parser = get_opts_parser(just_try=True)
    # BUG FIX: argv was accepted but ignored (parse_args() always read
    # sys.argv); forward it so main() is callable programmatically.
    options = parser.parse_args(argv)

    # Initialize logs
    init_logging(options, "Test LDAP helpers")

    now = datetime.datetime.now().replace(tzinfo=dateutil.tz.tzlocal())
    print(f"Now = {now}")

    datestring_now = format_datetime(now)
    print(f"format_datetime : {datestring_now}")
    print(
        "format_datetime (from_timezone=utc) :"
        f" {format_datetime(now.replace(tzinfo=None), from_timezone=pytz.utc)}"
    )
    print(
        "format_datetime (from_timezone=local) :"
        f" {format_datetime(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal())}"
    )
    print(
        "format_datetime (from_timezone=local) :"
        f' {format_datetime(now.replace(tzinfo=None), from_timezone="local")}'
    )
    print(
        "format_datetime (from_timezone=Paris) :"
        f' {format_datetime(now.replace(tzinfo=None), from_timezone="Europe/Paris")}'
    )
    print(f"format_datetime (to_timezone=utc) : {format_datetime(now, to_timezone=pytz.utc)}")
    print(
        "format_datetime (to_timezone=local) :"
        f" {format_datetime(now, to_timezone=dateutil.tz.tzlocal())}"
    )
    print(f'format_datetime (to_timezone=local) : {format_datetime(now, to_timezone="local")}')
    print(f'format_datetime (to_timezone=Tokyo) : {format_datetime(now, to_timezone="Asia/Tokyo")}')
    print(f"format_datetime (naive=True) : {format_datetime(now, naive=True)}")

    print(f"format_date : {format_date(now)}")
    print(
        "format_date (from_timezone=utc) :"
        f" {format_date(now.replace(tzinfo=None), from_timezone=pytz.utc)}"
    )
    print(
        "format_date (from_timezone=local) :"
        f" {format_date(now.replace(tzinfo=None), from_timezone=dateutil.tz.tzlocal())}"
    )
    print(
        "format_date (from_timezone=local) :"
        f' {format_date(now.replace(tzinfo=None), from_timezone="local")}'
    )
    print(
        "format_date (from_timezone=Paris) :"
        f' {format_date(now.replace(tzinfo=None), from_timezone="Europe/Paris")}'
    )
    print(f"format_date (to_timezone=utc) : {format_date(now, to_timezone=pytz.utc)}")
    print(
        f"format_date (to_timezone=local) : {format_date(now, to_timezone=dateutil.tz.tzlocal())}"
    )
    print(f'format_date (to_timezone=local) : {format_date(now, to_timezone="local")}')
    print(f'format_date (to_timezone=Tokyo) : {format_date(now, to_timezone="Asia/Tokyo")}')
    print(f"format_date (naive=True) : {format_date(now, naive=True)}")

    # datestring_now[0:-1] strips the trailing timezone marker so the
    # default_timezone parameter actually applies.
    print(f"parse_datetime : {parse_datetime(datestring_now)}")
    print(
        "parse_datetime (default_timezone=utc) :"
        f" {parse_datetime(datestring_now[0:-1], default_timezone=pytz.utc)}"
    )
    print(
        "parse_datetime (default_timezone=local) :"
        f" {parse_datetime(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal())}"
    )
    print(
        "parse_datetime (default_timezone=local) :"
        f' {parse_datetime(datestring_now[0:-1], default_timezone="local")}'
    )
    print(
        "parse_datetime (default_timezone=Paris) :"
        f' {parse_datetime(datestring_now[0:-1], default_timezone="Europe/Paris")}'
    )
    print(
        f"parse_datetime (to_timezone=utc) : {parse_datetime(datestring_now, to_timezone=pytz.utc)}"
    )
    print(
        "parse_datetime (to_timezone=local) :"
        f" {parse_datetime(datestring_now, to_timezone=dateutil.tz.tzlocal())}"
    )
    print(
        "parse_datetime (to_timezone=local) :"
        f' {parse_datetime(datestring_now, to_timezone="local")}'
    )
    print(
        "parse_datetime (to_timezone=Tokyo) :"
        f' {parse_datetime(datestring_now, to_timezone="Asia/Tokyo")}'
    )
    print(f"parse_datetime (naive=True) : {parse_datetime(datestring_now, naive=True)}")

    print(f"parse_date : {parse_date(datestring_now)}")
    print(
        "parse_date (default_timezone=utc) :"
        f" {parse_date(datestring_now[0:-1], default_timezone=pytz.utc)}"
    )
    print(
        "parse_date (default_timezone=local) :"
        f" {parse_date(datestring_now[0:-1], default_timezone=dateutil.tz.tzlocal())}"
    )
    print(
        "parse_date (default_timezone=local) :"
        f' {parse_date(datestring_now[0:-1], default_timezone="local")}'
    )
    print(
        "parse_date (default_timezone=Paris) :"
        f' {parse_date(datestring_now[0:-1], default_timezone="Europe/Paris")}'
    )
    print(f"parse_date (to_timezone=utc) : {parse_date(datestring_now, to_timezone=pytz.utc)}")
    print(
        "parse_date (to_timezone=local) :"
        f" {parse_date(datestring_now, to_timezone=dateutil.tz.tzlocal())}"
    )
    print(f'parse_date (to_timezone=local) : {parse_date(datestring_now, to_timezone="local")}')
    print(
        f'parse_date (to_timezone=Tokyo) : {parse_date(datestring_now, to_timezone="Asia/Tokyo")}'
    )
    print(f"parse_date (naive=True) : {parse_date(datestring_now, naive=True)}")
|
69
mylib/scripts/map_test.py
Normal file
69
mylib/scripts/map_test.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
""" Test mapping """
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from mylib import pretty_format_value
|
||||
from mylib.mapping import map_hash
|
||||
from mylib.scripts.helpers import get_opts_parser, init_logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main(argv=None):
    """Script main: demonstrate mylib.mapping.map_hash on sample data."""
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    parser = get_opts_parser(progress=True)
    # BUG FIX: argv was accepted but ignored (parse_args() always read
    # sys.argv); forward it so main() is callable programmatically.
    options = parser.parse_args(argv)

    # Initialize logs
    init_logging(options, "Test mapping")

    # NOTE(review): firstname "Martin" alongside disp_name "Henri Martin"
    # looks like a data typo (probably "Henri") — left as-is, it is only
    # sample data.
    src = {
        "uid": "hmartin",
        "firstname": "Martin",
        "lastname": "Martin",
        "disp_name": "Henri Martin",
        "line_1": "3 rue de Paris",
        "line_2": "Pour Pierre",
        "zip_text": "92 120",
        "city_text": "Montrouge",
        "line_city": "92120 Montrouge",
        "tel1": "01 00 00 00 00",
        "tel2": "09 00 00 00 00",
        "mobile": "06 00 00 00 00",
        "fax": "01 00 00 00 00",
        "email": "H.MARTIN@GMAIL.COM",
    }

    map_c = {
        "uid": {"order": 0, "key": "uid", "required": True},
        "givenName": {"order": 1, "key": "firstname"},
        "sn": {"order": 2, "key": "lastname"},
        "cn": {
            "order": 3,
            "key": "disp_name",
            "required": True,
            "or": {"attrs": ["firstname", "lastname"], "join": " "},
        },
        "displayName": {"order": 4, "other_key": "displayName"},
        # BUG FIX: the mapping referenced left-over French key names
        # ("ligne_1", "ligne_2", "ligne_city") that do not exist in src;
        # use the actual src keys so street/postalAddress are mapped.
        "street": {"order": 5, "join": " / ", "keys": ["line_1", "line_2"]},
        "postalCode": {"order": 6, "key": "zip_text", "cleanRegex": "[^0-9]"},
        "l": {"order": 7, "key": "city_text"},
        "postalAddress": {"order": 8, "join": "$", "keys": ["line_1", "line_2", "line_city"]},
        "telephoneNumber": {
            "order": 9,
            "keys": ["tel1", "tel2"],
            "cleanRegex": "[^0-9+]",
            "deduplicate": True,
        },
        "mobile": {"order": 10, "key": "mobile"},
        "facsimileTelephoneNumber": {"order": 11, "key": "fax"},
        "mail": {"order": 12, "key": "email", "convert": lambda x: x.lower().strip()},
    }

    print("Mapping source:\n" + pretty_format_value(src))
    print("Mapping config:\n" + pretty_format_value(map_c))
    print("Mapping result:\n" + pretty_format_value(map_hash(map_c, src)))
|
41
mylib/scripts/pbar_test.py
Normal file
41
mylib/scripts/pbar_test.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
""" Test Progress bar """
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
from mylib.pbar import Pbar
|
||||
from mylib.scripts.helpers import get_opts_parser, init_logging
|
||||
|
||||
log = logging.getLogger("mylib.scripts.pbar_test")
|
||||
|
||||
|
||||
def main(argv=None):  # pylint: disable=too-many-locals,too-many-statements
    """Script main: demo the Pbar progress bar helper.

    :param argv: optional argument list (defaults to sys.argv[1:])
    """
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    default_max_val = 10
    parser = get_opts_parser(progress=True)

    parser.add_argument(
        "-c",
        "--count",
        action="store",
        type=int,
        dest="count",
        help=f"Progress bar max value (default: {default_max_val})",
        default=default_max_val,
    )

    # Fix: honor the argv parameter (it was previously computed then ignored,
    # parse_args() always re-reading sys.argv)
    options = parser.parse_args(argv)

    # Initialize logs
    init_logging(options, "Test Pbar")

    pbar = Pbar("Test", options.count, enabled=options.progress)

    # Tick the bar options.count times with a small delay so the rendering
    # is visible, then finalize it.
    for _ in range(options.count):
        pbar.increment()
        time.sleep(0.3)
    pbar.finish()
|
46
mylib/scripts/report_test.py
Normal file
46
mylib/scripts/report_test.py
Normal file
|
@ -0,0 +1,46 @@
|
|||
""" Test report """
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from mylib.report import Report
|
||||
from mylib.scripts.helpers import add_email_opts, get_opts_parser, init_email_client, init_logging
|
||||
|
||||
log = logging.getLogger("mylib.scripts.report_test")
|
||||
|
||||
|
||||
def main(argv=None):  # pylint: disable=too-many-locals,too-many-statements
    """Script main: demo the Report helper (collects log records and emails
    them at exit to the recipient given with -t/--to).

    :param argv: optional argument list (defaults to sys.argv[1:])
    """
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    parser = get_opts_parser(just_try=True)
    add_email_opts(parser)

    report_opts = parser.add_argument_group("Report options")

    report_opts.add_argument(
        "-t",
        "--to",
        action="store",
        type=str,
        dest="report_recipient",
        help="Send report to this email",
    )

    # Fix: honor the argv parameter (it was previously computed then ignored,
    # parse_args() always re-reading sys.argv)
    options = parser.parse_args(argv)

    if not options.report_recipient:
        parser.error("You must specify a report recipient using -t/--to parameter")

    # Initialize logs; the report object captures the log records emitted below
    report = Report(options=options, subject="Test report")
    init_logging(options, "Test Report", report=report)

    email_client = init_email_client(options)
    report.send_at_exit(email_client=email_client)

    # Emit one record per level so the emailed report has content
    logging.debug("Test debug message")
    logging.info("Test info message")
    logging.warning("Test warning message")
    logging.error("Test error message")
|
106
mylib/scripts/sftp_test.py
Normal file
106
mylib/scripts/sftp_test.py
Normal file
|
@ -0,0 +1,106 @@
|
|||
""" Test SFTP client """
|
||||
import atexit
|
||||
import getpass
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from mylib.scripts.helpers import add_sftp_opts, get_opts_parser, init_logging
|
||||
from mylib.sftp import SFTPClient
|
||||
|
||||
log = logging.getLogger("mylib.scripts.sftp_test")
|
||||
|
||||
|
||||
def main(argv=None):  # pylint: disable=too-many-locals,too-many-statements
    """Script main: round-trip test of the SFTP client.

    Uploads a temporary file, retrieves it, remotely opens it, checks the
    content both times, then removes it from the server.

    :param argv: optional argument list (defaults to sys.argv[1:])
    """
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    parser = get_opts_parser(just_try=True)
    add_sftp_opts(parser)

    test_opts = parser.add_argument_group("Test SFTP options")

    test_opts.add_argument(
        "-p",
        "--remote-upload-path",
        action="store",
        type=str,
        dest="upload_path",
        help="Remote upload path (default: on remote initial connection directory)",
    )

    # Fix: honor the argv parameter (it was previously computed then ignored,
    # parse_args() always re-reading sys.argv)
    options = parser.parse_args(argv)

    # Initialize logs
    init_logging(options, "Test SFTP client")

    # Prompt for the password only when a user is set but no password given
    if options.sftp_user and not options.sftp_password:
        options.sftp_password = getpass.getpass("Please enter SFTP password: ")

    # Fix: message previously said "Initialize Email client" (copy-paste error)
    log.info("Initialize SFTP client")
    sftp = SFTPClient(options=options)
    sftp.connect()
    atexit.register(sftp.close)

    log.debug("Create temporary file")
    test_content = b"Juste un test."
    tmp_dir = tempfile.TemporaryDirectory()  # pylint: disable=consider-using-with
    tmp_file = os.path.join(
        tmp_dir.name,
        f'tmp{"".join(random.choice(string.ascii_lowercase) for i in range(8))}',  # nosec
    )
    log.debug('Temporary file path: "%s"', tmp_file)
    with open(tmp_file, "wb") as file_desc:
        file_desc.write(test_content)

    log.debug(
        "Upload file %s to SFTP server (in %s)",
        tmp_file,
        options.upload_path if options.upload_path else "remote initial connection directory",
    )
    if not sftp.upload_file(tmp_file, options.upload_path):
        log.error("Fail to upload test file on SFTP server")
        sys.exit(1)

    log.info("Test file uploaded on SFTP server")
    remote_filepath = (
        os.path.join(options.upload_path, os.path.basename(tmp_file))
        if options.upload_path
        else os.path.basename(tmp_file)
    )

    # In just-try mode nothing was really uploaded, so skip the download check
    if not sftp._just_try:  # pylint: disable=protected-access
        with tempfile.NamedTemporaryFile() as tmp_file2:
            log.info("Retrieve test file to %s", tmp_file2.name)
            if not sftp.get_file(remote_filepath, tmp_file2.name):
                log.error("Fail to retrieve test file")
            else:
                with open(tmp_file2.name, "rb") as file_desc:
                    content = file_desc.read()
                log.debug("Read content: %s", content)
                if test_content == content:
                    log.info("Content file retrieved match with uploaded one")
                else:
                    # Fix: typo "doest not" in log output
                    log.error("Content file retrieved does not match with uploaded one")

    try:
        # Fix: typo "Remotly" in log output
        log.info("Remotely open test file %s", remote_filepath)
        file_desc = sftp.open_file(remote_filepath)
        content = file_desc.read()
        log.debug("Read content: %s", content)
        if test_content == content:
            log.info("Content of remote file match with uploaded one")
        else:
            log.error("Content of remote file does not match with uploaded one")
    except Exception:  # pylint: disable=broad-except
        log.exception("An exception occurred remotely opening test file %s", remote_filepath)

    if sftp.remove_file(remote_filepath):
        log.info("Test file removed on SFTP server")
    else:
        log.error("Fail to remove test file on SFTP server")
|
12
mylib/scripts/telltale_check_test.py
Normal file
12
mylib/scripts/telltale_check_test.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
""" Test telltale file """
|
||||
import logging
|
||||
|
||||
from mylib.scripts.telltale_test import default_filepath
|
||||
from mylib.telltale import TelltaleFile
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main(argv=None):
    """Script main: delegate to TelltaleFile's Nagios-style check entry point."""
    TelltaleFile.check_entrypoint(
        argv=argv,
        default_filepath=default_filepath,
    )
|
40
mylib/scripts/telltale_test.py
Normal file
40
mylib/scripts/telltale_test.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
""" Test telltale file """
|
||||
import logging
|
||||
import os.path
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from mylib.scripts.helpers import get_opts_parser, init_logging
|
||||
from mylib.telltale import TelltaleFile
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
default_filepath = os.path.join(tempfile.gettempdir(), f"{__name__}.last")
|
||||
|
||||
|
||||
def main(argv=None):
    """Script main: update (touch/create) the telltale file.

    :param argv: optional argument list (defaults to sys.argv[1:])
    """
    if argv is None:
        argv = sys.argv[1:]

    # Options parser
    parser = get_opts_parser()

    parser.add_argument(
        "-p",
        "--telltale-file-path",
        action="store",
        type=str,
        dest="telltale_file_path",
        help=f"Telltale file path (default: {default_filepath})",
        default=default_filepath,
    )

    # Fix: arguments were previously parsed once *before* -p was registered
    # (so passing -p made argparse fail on an unknown option) and then parsed
    # a second time. Parse only once, after all arguments are registered, and
    # honor the argv parameter instead of always re-reading sys.argv.
    options = parser.parse_args(argv)

    # Initialize logs
    init_logging(options, __doc__)

    telltale_file = TelltaleFile(filepath=options.telltale_file_path)
    telltale_file.update()
|
162
mylib/sftp.py
Normal file
162
mylib/sftp.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
""" SFTP client """
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from paramiko import AutoAddPolicy, SFTPAttributes, SSHClient
|
||||
|
||||
from mylib.config import (
|
||||
BooleanOption,
|
||||
ConfigurableObject,
|
||||
IntegerOption,
|
||||
PasswordOption,
|
||||
StringOption,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SFTPClient(ConfigurableObject):
    """
    SFTP client

    This class abstract all interactions with the SFTP server.

    Connection is established lazily: every operation calls connect(),
    which is a no-op once connected. In just-try mode, write operations
    (upload/remove) are logged but not performed.
    """

    _config_name = "sftp"
    _config_comment = "SFTP"
    # Default values for every configuration option registered in configure()
    _defaults = {
        "host": "localhost",
        "port": 22,
        "user": None,
        "password": None,
        "known_hosts": os.path.expanduser("~/.ssh/known_hosts"),
        "auto_add_unknown_host_key": False,
        "just_try": False,
    }

    # Connection state, initialized by connect()
    ssh_client = None
    sftp_client = None
    initial_directory = None

    # pylint: disable=arguments-differ,arguments-renamed
    def configure(self, **kwargs):
        """Configure options on registered mylib.Config object"""
        section = super().configure(
            just_try=kwargs.pop("just_try", True),
            just_try_help=kwargs.pop(
                "just_try_help", "Just-try mode: do not really make change on remote SFTP host"
            ),
            **kwargs,
        )

        section.add_option(
            StringOption,
            "host",
            default=self._defaults["host"],
            comment="SFTP server hostname/IP address",
        )
        section.add_option(
            IntegerOption, "port", default=self._defaults["port"], comment="SFTP server port"
        )
        section.add_option(
            StringOption,
            "user",
            default=self._defaults["user"],
            comment="SFTP authentication username",
        )
        section.add_option(
            PasswordOption,
            "password",
            default=self._defaults["password"],
            comment='SFTP authentication password (set to "keyring" to use XDG keyring)',
            username_option="user",
            keyring_value="keyring",
        )
        section.add_option(
            StringOption,
            "known_hosts",
            default=self._defaults["known_hosts"],
            comment="SFTP known_hosts filepath",
        )
        section.add_option(
            BooleanOption,
            "auto_add_unknown_host_key",
            default=self._defaults["auto_add_unknown_host_key"],
            comment="Auto add unknown host key",
        )

        return section

    def initialize(self, loaded_config=None):
        """Configuration initialized hook"""
        # NOTE(review): calls the parent's __init__ rather than an
        # initialize() hook -- presumably to re-run base setup with the
        # loaded configuration; confirm against ConfigurableObject.
        super().__init__(loaded_config=loaded_config)

    def connect(self):
        """Connect to SFTP server (no-op if already connected)"""
        if self.ssh_client:
            return
        host = self._get_option("host")
        port = self._get_option("port")
        log.info("Connect to SFTP server %s:%d", host, port)
        self.ssh_client = SSHClient()
        if self._get_option("known_hosts"):
            self.ssh_client.load_host_keys(self._get_option("known_hosts"))
        if self._get_option("auto_add_unknown_host_key"):
            log.debug("Set missing host key policy to auto-add")
            self.ssh_client.set_missing_host_key_policy(AutoAddPolicy())
        self.ssh_client.connect(
            host,
            port=port,
            username=self._get_option("user"),
            password=self._get_option("password"),
        )
        self.sftp_client = self.ssh_client.open_sftp()
        # Remember where the session starts so relative uploads land there
        self.initial_directory = self.sftp_client.getcwd()
        if self.initial_directory:
            log.debug("Initial remote directory: '%s'", self.initial_directory)
        else:
            log.debug("Fail to retrieve remote directory, use empty string instead")
            self.initial_directory = ""

    def get_file(self, remote_filepath, local_filepath):
        """Retrieve a file from SFTP server

        :return: True on success (paramiko's get() returns None on success)
        """
        self.connect()
        log.debug("Retrieve file '%s' to '%s'", remote_filepath, local_filepath)
        return self.sftp_client.get(remote_filepath, local_filepath) is None

    def open_file(self, remote_filepath, mode="r"):
        """Remotely open a file on SFTP server and return the file object"""
        self.connect()
        log.debug("Remotly open file '%s'", remote_filepath)
        return self.sftp_client.open(remote_filepath, mode=mode)

    def upload_file(self, filepath, remote_directory=None):
        """Upload a file on SFTP server

        :param filepath: local file path; its basename is kept on the server
        :param remote_directory: target directory (default: initial directory)
        :return: True on success (or in just-try mode)
        """
        self.connect()
        remote_filepath = os.path.join(
            remote_directory if remote_directory else self.initial_directory,
            os.path.basename(filepath),
        )
        log.debug("Upload file '%s' to '%s'", filepath, remote_filepath)
        if self._just_try:
            log.debug(
                "Just-try mode: do not really upload file '%s' to '%s'", filepath, remote_filepath
            )
            return True
        result = self.sftp_client.put(filepath, remote_filepath)
        return isinstance(result, SFTPAttributes)

    def remove_file(self, filepath):
        """Remove a file on SFTP server

        :return: True on success (or in just-try mode)
        """
        self.connect()
        log.debug("Remove file '%s'", filepath)
        if self._just_try:
            # Fix: message spacing made consistent with upload_file()
            log.debug("Just-try mode: do not really remove file '%s'", filepath)
            return True
        return self.sftp_client.remove(filepath) is None

    def close(self):
        """Close SSH/SFTP connection"""
        log.debug("Close connection")
        # Fix: guard against close() being called before connect() (callers
        # register it via atexit), which previously raised AttributeError.
        if self.ssh_client:
            self.ssh_client.close()
|
165
mylib/telltale.py
Normal file
165
mylib/telltale.py
Normal file
|
@ -0,0 +1,165 @@
|
|||
""" Telltale files helpers """
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from mylib import pretty_format_timedelta
|
||||
from mylib.scripts.helpers import get_opts_parser, init_logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_WARNING_THRESHOLD = 90
|
||||
DEFAULT_CRITICAL_THRESHOLD = 240
|
||||
|
||||
|
||||
class TelltaleFile:
    """Telltale file helper class

    A telltale file is an empty file whose modification time records the
    last successful run of a job: update() touches (or creates) it,
    last_update reads its mtime, and check_entrypoint() implements a
    monitoring-plugin-style check (exit codes: 0=OK, 1=WARNING,
    2=CRITICAL, 3=UNKNOWN) against the warning/critical thresholds.
    """

    def __init__(self, filepath=None, filename=None, dirpath=None):
        # At least one of filepath/filename is required; when several are
        # given they must agree with each other.
        assert filepath or filename, "filename or filepath is required"
        if filepath:
            assert (
                not filename or os.path.basename(filepath) == filename
            ), "filepath and filename does not match"
            assert (
                not dirpath or os.path.dirname(filepath) == dirpath
            ), "filepath and dirpath does not match"
        # Derive whichever pieces were not provided from the others;
        # directory defaults to the current working directory.
        self.filename = filename if filename else os.path.basename(filepath)
        self.dirpath = (
            dirpath if dirpath else (os.path.dirname(filepath) if filepath else os.getcwd())
        )
        self.filepath = filepath if filepath else os.path.join(self.dirpath, self.filename)

    @property
    def last_update(self):
        """Retrieve last update datetime of the telltall file

        :return: datetime of the file's last modification, or None if the
                 file does not exist.
        """
        try:
            return datetime.datetime.fromtimestamp(os.stat(self.filepath).st_mtime)
        except FileNotFoundError:
            log.info("Telltale file not found (%s)", self.filepath)
            return None

    def update(self):
        """Update the telltale file (touch it, creating it if missing)"""
        log.info("Update telltale file (%s)", self.filepath)
        try:
            # Refresh the modification time to "now"
            os.utime(self.filepath, None)
        except FileNotFoundError:
            # File does not exist yet: create it empty
            # pylint: disable=consider-using-with
            open(self.filepath, "a", encoding="utf-8").close()

    def remove(self):
        """Remove the telltale file

        :return: True in all cases -- removal is idempotent, so a missing
                 file also counts as success.
        """
        try:
            os.remove(self.filepath)
            return True
        except FileNotFoundError:
            return True

    @classmethod
    def check_entrypoint(
        cls,
        argv=None,
        description=None,
        default_filepath=None,
        default_warning_threshold=None,
        default_critical_threshold=None,
        fail_message=None,
        success_message=None,
    ):
        """Entry point of the script to check a telltale file last update

        :param argv: full argument vector (argv[0] is the program name;
                     defaults to sys.argv)
        :param description: parser description (default: "Check last
                            execution date")
        :param default_filepath: default telltale file path; when unset the
                                 -p option becomes required
        :param default_warning_threshold: warning threshold in minutes
                                          (default: DEFAULT_WARNING_THRESHOLD)
        :param default_critical_threshold: critical threshold in minutes
                                           (default: DEFAULT_CRITICAL_THRESHOLD)
        :param fail_message: message printed when the last update cannot be
                             retrieved
        :param success_message: message template, may use {last_delay} and
                                {last_date} placeholders

        Never returns: always terminates via sys.exit() with a
        monitoring-plugin exit code.
        """
        argv = argv if argv else sys.argv
        description = description if description else "Check last execution date"
        # exit_on_error=False so argument errors can be mapped to UNKNOWN below
        parser = get_opts_parser(desc=description, exit_on_error=False)

        parser.add_argument(
            "-p",
            "--telltale-file-path",
            action="store",
            type=str,
            dest="telltale_file_path",
            help=f"Telltale file path (default: {default_filepath})",
            default=default_filepath,
            required=not default_filepath,
        )

        # Fall back on module-level defaults when no threshold is provided
        default_warning_threshold = (
            default_warning_threshold
            if default_warning_threshold is not None
            else DEFAULT_WARNING_THRESHOLD
        )
        default_critical_threshold = (
            default_critical_threshold
            if default_critical_threshold is not None
            else DEFAULT_CRITICAL_THRESHOLD
        )

        parser.add_argument(
            "-w",
            "--warning",
            type=int,
            dest="warning",
            help=(
                "Specify warning threshold (in minutes, default: "
                f"{default_warning_threshold} minutes)"
            ),
            default=default_warning_threshold,
        )

        parser.add_argument(
            "-c",
            "--critical",
            type=int,
            dest="critical",
            help=(
                "Specify critical threshold (in minutes, default: "
                f"{default_critical_threshold} minutes)"
            ),
            default=default_critical_threshold,
        )

        try:
            options = parser.parse_args(argv[1:])
        except argparse.ArgumentError as err:
            # Monitoring-plugin convention: bad arguments => UNKNOWN (exit 3)
            print(f"UNKNOWN - {err}")
            sys.exit(3)

        # Initialize logs
        init_logging(options, argv[0])

        telltale_file = cls(filepath=options.telltale_file_path)
        last = telltale_file.last_update
        if not last:
            # File missing: cannot tell when the job last succeeded
            status = "UNKNOWN"
            exit_code = 3
            msg = (
                fail_message
                if fail_message
                else "Fail to retrieve last successful date of execution"
            )
        else:
            delay = datetime.datetime.now() - last
            msg = (
                success_message
                if success_message
                else "Last successful execution was {last_delay} ago ({last_date})"
            ).format(
                last_delay=pretty_format_timedelta(delay),
                last_date=last.strftime("%Y/%m/%d %H:%M:%S"),
            )

            # Map the elapsed delay onto monitoring-plugin statuses
            if delay >= datetime.timedelta(minutes=options.critical):
                status = "CRITICAL"
                exit_code = 2
            elif delay >= datetime.timedelta(minutes=options.warning):
                status = "WARNING"
                exit_code = 1
            else:
                status = "OK"
                exit_code = 0

        print(f"{status} - {msg}")
        sys.exit(exit_code)
|
207
opening_hours.py
207
opening_hours.py
|
@ -1,207 +0,0 @@
|
|||
import datetime, re, time, logging
|
||||
|
||||
week_days=['lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi', 'dimanche']
|
||||
def easter_date(year):
    """Return the date of Easter Sunday for *year* (Gregorian calendar)."""
    century, rem = divmod(year, 100)
    c = (3 * (century + 25)) // 4
    d = (3 * (century + 25)) % 4
    e = (8 * (century + 11)) // 25
    f = (5 * century + rem) % 19
    g = (19 * f + c - e) % 30
    h = (f + 11 * g) // 319
    j = (60 * (5 - d) + rem) // 4
    k = (60 * (5 - d) + rem) % 4
    m = (2 * j - k - g + h) % 7
    month = (g - h + m + 114) // 31
    day = (g - h + m + 114) % 31 + 1
    return datetime.date(year, month, day)
|
||||
|
||||
def nonworking_french_public_days_of_the_year(year=None):
    """Return French public holidays for *year* as a {name: datetime.date} dict.

    Movable feasts are derived from the Easter date; *year* defaults to the
    current year.
    """
    if year is None:
        year = datetime.date.today().year
    easter = easter_date(year)
    return {
        '1janvier': datetime.date(year, 1, 1),
        'paques': easter,
        'lundi_paques': easter + datetime.timedelta(1),
        '1mai': datetime.date(year, 5, 1),
        '8mai': datetime.date(year, 5, 8),
        'jeudi_ascension': easter + datetime.timedelta(39),
        'pentecote': easter + datetime.timedelta(49),
        'lundi_pentecote': easter + datetime.timedelta(50),
        '14juillet': datetime.date(year, 7, 14),
        '15aout': datetime.date(year, 8, 15),
        '1novembre': datetime.date(year, 11, 1),
        '11novembre': datetime.date(year, 11, 11),
        'noel': datetime.date(year, 12, 25),
        'saint_etienne': datetime.date(year, 12, 26),
    }
|
||||
|
||||
def parse_exceptional_closures(values):
    """Parse exceptional closure definitions.

    Each value is a whitespace-separated list of words, where a word is a
    single day ("31/12/2017"), a day range ("18/12/2017-20/12/2017") or an
    hour period ("9h-10h30"). Returns a list of dicts with 'days' (list of
    datetime.date) and 'hours_periods' (list of {'start','stop'}
    datetime.time dicts); an empty 'hours_periods' means all-day closure.
    Raises ValueError on invalid input.
    """
    exceptional_closures = []
    date_pattern = re.compile('^([0-9]{2})/([0-9]{2})/([0-9]{4})$')
    time_pattern = re.compile('^([0-9]{1,2})h([0-9]{2})?$')
    for value in values:
        days = []
        hours_periods = []
        words = value.strip().split()
        for word in words:
            if word == '':
                continue
            parts = word.split('-')
            if len(parts) == 1:
                # ex: 31/02/2017
                ptime = time.strptime(word, '%d/%m/%Y')
                date = datetime.date(ptime.tm_year, ptime.tm_mon, ptime.tm_mday)
                if date not in days:
                    days.append(date)
            elif len(parts) == 2:
                # ex: 18/12/2017-20/12/2017 or 9h-10h30
                if date_pattern.match(parts[0]) and date_pattern.match(parts[1]):
                    # ex: 18/12/2017-20/12/2017
                    pstart = time.strptime(parts[0], '%d/%m/%Y')
                    pstop = time.strptime(parts[1], '%d/%m/%Y')
                    if pstop <= pstart:
                        raise ValueError('Day %s <= %s' % (parts[1], parts[0]))

                    date = datetime.date(pstart.tm_year, pstart.tm_mon, pstart.tm_mday)
                    # Fix: the range end was previously built from pstart's
                    # month/day (stop_date = date(pstop.tm_year, pstart.tm_mon,
                    # pstart.tm_mday)), truncating any range crossing a month.
                    stop_date = datetime.date(pstop.tm_year, pstop.tm_mon, pstop.tm_mday)
                    while date <= stop_date:
                        if date not in days:
                            days.append(date)
                        date += datetime.timedelta(days=1)
                else:
                    # ex: 9h-10h30
                    mstart = time_pattern.match(parts[0])
                    mstop = time_pattern.match(parts[1])
                    if not mstart or not mstop:
                        raise ValueError('"%s" is not a valid time period' % word)
                    # Missing minutes (e.g. "9h") default to 0
                    hstart = datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0))
                    hstop = datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0))
                    if hstop <= hstart:
                        raise ValueError('Time %s <= %s' % (parts[1], parts[0]))
                    hours_periods.append({'start': hstart, 'stop': hstop})
            else:
                raise ValueError('Invalid number of part in this word : "%s"' % word)
        if not days:
            raise ValueError('No days found in value "%s"' % word)
        exceptional_closures.append({'days': days, 'hours_periods': hours_periods})
    return exceptional_closures
|
||||
|
||||
|
||||
def parse_normal_opening_hours(values):
    """Parse normal opening hours definitions.

    Each value mixes week day names ("jeudi"), week day ranges
    ("lundi-jeudi") and hour periods ("9h-10h30"); returns a list of
    {'days', 'hours_periods'} dicts. Raises ValueError on invalid input.
    """
    time_pattern = re.compile('^([0-9]{1,2})h([0-9]{2})?$')
    normal_opening_hours = []
    for value in values:
        days, hours_periods = [], []
        for word in value.strip().split():
            if word == '':
                continue
            parts = word.split('-')
            if len(parts) == 1:
                # A single week day, e.g. "jeudi"
                if word not in week_days:
                    raise ValueError('"%s" is not a valid week day' % word)
                if word not in days:
                    days.append(word)
            elif len(parts) == 2:
                # Either a week-day range ("lundi-jeudi") or an hour period ("9h-10h30")
                if parts[0] in week_days and parts[1] in week_days:
                    first = week_days.index(parts[0])
                    last = week_days.index(parts[1])
                    if last <= first:
                        raise ValueError('"%s" is before "%s"' % (parts[1], parts[0]))
                    for day_name in week_days[first:last + 1]:
                        if day_name not in days:
                            days.append(day_name)
                else:
                    mstart = time_pattern.match(parts[0])
                    mstop = time_pattern.match(parts[1])
                    if not mstart or not mstop:
                        raise ValueError('"%s" is not a valid time period' % word)
                    hstart = datetime.time(int(mstart.group(1)), int(mstart.group(2) or 0))
                    hstop = datetime.time(int(mstop.group(1)), int(mstop.group(2) or 0))
                    if hstop <= hstart:
                        raise ValueError('Time %s <= %s' % (parts[1], parts[0]))
                    hours_periods.append({'start': hstart, 'stop': hstop})
            else:
                raise ValueError('Invalid number of part in this word : "%s"' % word)
        if not days and not hours_periods:
            raise ValueError('No days or hours period found in this value : "%s"' % value)
        normal_opening_hours.append({'days': days, 'hours_periods': hours_periods})
    return normal_opening_hours
|
||||
|
||||
def is_closed(
    normal_opening_hours_values=None,
    exceptional_closures_values=None,
    nonworking_public_holidays_values=None,
    when=None,
    on_error='raise',
    exceptional_closure_on_nonworking_public_days=False,
):
    """Tell whether the place is closed at a given moment.

    :param normal_opening_hours_values: normal opening hours definitions
        (see parse_normal_opening_hours)
    :param exceptional_closures_values: exceptional closures definitions
        (see parse_exceptional_closures)
    :param nonworking_public_holidays_values: keys of observed French public
        holidays (see nonworking_french_public_days_of_the_year)
    :param when: datetime to evaluate (default: now, computed per call)
    :param on_error: 'raise' (default), 'closed' or 'opened' -- behavior when
        a definition cannot be parsed
    :param exceptional_closure_on_nonworking_public_days: report public
        holidays as exceptional closures
    :return: dict with 'closed', 'exceptional_closure' and
        'exceptional_closure_all_day' booleans

    Fixes over the original:
    - "except Exception, e" (Python 2 syntax, a SyntaxError under Python 3)
      replaced by "except Exception as e";
    - mutable default arguments ([]) replaced by None sentinels;
    - the default `when` was datetime.datetime.now() evaluated once at import
      time; it is now computed at each call.
    """
    normal_opening_hours_values = normal_opening_hours_values or []
    exceptional_closures_values = exceptional_closures_values or []
    nonworking_public_holidays_values = nonworking_public_holidays_values or []
    if when is None:
        when = datetime.datetime.now()

    when_date = when.date()
    when_time = when.time()
    when_weekday = week_days[when.timetuple().tm_wday]

    # Pre-compute the value returned on parsing errors (None => re-raise)
    on_error_result = None
    if on_error == 'closed':
        on_error_result = {
            'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False}
    elif on_error == 'opened':
        on_error_result = {
            'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}

    logging.debug("%s => %s / %s / %s", when, when_date, when_time, when_weekday)

    # 1. Observed public holidays
    if len(nonworking_public_holidays_values) > 0:
        logging.debug("Nonworking public holidays : %s", nonworking_public_holidays_values)
        nonworking_days = nonworking_french_public_days_of_the_year()
        for day in nonworking_public_holidays_values:
            if day in nonworking_days and when_date == nonworking_days[day]:
                logging.debug("Non working day : %s", day)
                return {
                    'closed': True,
                    'exceptional_closure': exceptional_closure_on_nonworking_public_days,
                    'exceptional_closure_all_day': exceptional_closure_on_nonworking_public_days,
                }

    # 2. Exceptional closures
    if len(exceptional_closures_values) > 0:
        try:
            exceptional_closures = parse_exceptional_closures(exceptional_closures_values)
            logging.debug('Exceptional closures : %s', exceptional_closures)
        except Exception as e:  # pylint: disable=broad-except
            logging.error("%s => Not closed by default", e)
            if on_error_result is None:
                raise
            return on_error_result
        for cl in exceptional_closures:
            if when_date not in cl['days']:
                logging.debug("when_date (%s) no in days (%s)", when_date, cl['days'])
                continue
            if not cl['hours_periods']:
                # All day exceptional closure
                return {
                    'closed': True,
                    'exceptional_closure': True,
                    'exceptional_closure_all_day': True,
                }
            for hp in cl['hours_periods']:
                if hp['start'] <= when_time and hp['stop'] >= when_time:
                    return {
                        'closed': True,
                        'exceptional_closure': True,
                        'exceptional_closure_all_day': False,
                    }

    # 3. Normal opening hours
    if len(normal_opening_hours_values) > 0:
        try:
            normal_opening_hours = parse_normal_opening_hours(normal_opening_hours_values)
            logging.debug('Normal opening hours : %s', normal_opening_hours)
        except Exception as e:  # pylint: disable=broad-except
            logging.error("%s => Not closed by default", e)
            if on_error_result is None:
                raise
            return on_error_result
        for oh in normal_opening_hours:
            if oh['days'] and when_weekday not in oh['days']:
                logging.debug("when_weekday (%s) no in days (%s)", when_weekday, oh['days'])
                continue
            if not oh['hours_periods']:
                # All day opened
                return {
                    'closed': False,
                    'exceptional_closure': False,
                    'exceptional_closure_all_day': False,
                }
            for hp in oh['hours_periods']:
                if hp['start'] <= when_time and hp['stop'] >= when_time:
                    return {
                        'closed': False,
                        'exceptional_closure': False,
                        'exceptional_closure_all_day': False,
                    }
        logging.debug("Not in normal opening hours => closed")
        return {'closed': True, 'exceptional_closure': False, 'exceptional_closure_all_day': False}

    # Not a nonworking day, not during exceptional closure and no normal
    # opening hours defined => Opened
    return {'closed': False, 'exceptional_closure': False, 'exceptional_closure_all_day': False}
|
3
setup.cfg
Normal file
3
setup.cfg
Normal file
|
@ -0,0 +1,3 @@
|
|||
[flake8]
|
||||
ignore = E501,W503
|
||||
max-line-length = 100
|
88
setup.py
Normal file
88
setup.py
Normal file
|
@ -0,0 +1,88 @@
|
|||
#!/usr/bin/env python
"""Setuptools script"""

from setuptools import find_packages, setup

# Optional dependency groups: one per mylib feature, plus "dev" tooling.
extras_require = {
    "dev": [
        "pytest",
        "mocker",
        "pytest-mock",
        "pylint == 2.15.10",
        "pre-commit",
    ],
    "config": [
        "argcomplete",
        "keyring",
        "systemd-python",
    ],
    "ldap": [
        "python-ldap",
        "python-dateutil",
        "pytz",
    ],
    "email": [
        "python-magic",
        "mako",
    ],
    "pgsql": [
        "psycopg2",
    ],
    "oracle": [
        "cx_Oracle",
    ],
    "mysql": [
        "mysqlclient",
    ],
    "sftp": [
        "paramiko",
    ],
}

# Every non-dev extra is also installed by default.
install_requires = ["progressbar"]
for extra, deps in extras_require.items():
    if extra != "dev":
        install_requires.extend(deps)

version = "0.1"

# Reuse the README as the long description shown on package indexes.
with open("README.md", encoding="utf-8") as fd:
    long_description = fd.read()

setup(
    name="mylib",
    version=version,
    description="A set of helpers small libs to make common tasks easier in my script development",
    long_description=long_description,
    classifiers=[
        "Programming Language :: Python",
    ],
    install_requires=install_requires,
    extras_require=extras_require,
    author="Benjamin Renard",
    author_email="brenard@zionetrix.net",
    url="https://gogs.zionetrix.net/bn8/python-mylib",
    packages=find_packages(),
    include_package_data=True,
    # Ship the email templates alongside the code.
    package_data={
        "": [
            "scripts/email_templates/*.subject",
            "scripts/email_templates/*.txt",
            "scripts/email_templates/*.html",
        ],
    },
    zip_safe=False,
    # One console script per test helper module.
    entry_points={
        "console_scripts": [
            "mylib-test-email = mylib.scripts.email_test:main",
            "mylib-test-email-with-config = mylib.scripts.email_test_with_config:main",
            "mylib-test-map = mylib.scripts.map_test:main",
            "mylib-test-pbar = mylib.scripts.pbar_test:main",
            "mylib-test-report = mylib.scripts.report_test:main",
            "mylib-test-ldap = mylib.scripts.ldap_test:main",
            "mylib-test-sftp = mylib.scripts.sftp_test:main",
            "mylib-test-telltale = mylib.scripts.telltale_test:main",
            "mylib-test-telltale-check = mylib.scripts.telltale_check_test:main",
        ],
    },
)
|
77
tests.sh
Executable file
77
tests.sh
Executable file
|
@ -0,0 +1,77 @@
|
|||
#!/bin/bash
# Run the project's checks (pre-commit) in a virtualenv: an existing ./venv,
# a temporary one, or the system environment with --no-venv.

QUIET_ARG=""
NO_VENV=0

function usage() {
    # $1: optional error message; when set, print it and exit non-zero
    [ -n "$1" ] && echo -e "$1\n" > /dev/stderr
    echo "Usage: $0 [-x] [-q|--quiet] [--no-venv]"
    echo "  -h/--help     Show usage message"
    echo "  -q/--quiet    Enable quiet mode"
    echo "  -n/--no-venv  Disable venv creation and run tests on system environment"
    echo "  -x            Enable debug mode"
    [ -n "$1" ] && exit 1
    exit 0
}

idx=1
while [ $idx -le $# ]
do
    OPT=${!idx}
    case $OPT in
        -h|--help)
            usage
            ;;
        -q|--quiet)
            QUIET_ARG="--quiet"
            ;;
        -n|--no-venv)
            NO_VENV=1
            ;;
        -x)
            set -x
            ;;
        *)
            usage "Unknown parameter '$OPT'"
    esac
    let idx=idx+1
done
[ "$1" == "--quiet" ] && QUIET_ARG="--quiet"

# Enter source directory
# Fix: quote the path (spaces) and abort if cd fails, so pip/rm below
# never run in the wrong directory.
cd "$(dirname "$0")" || exit 1

TEMP_VENV=0
VENV=""
if [ $NO_VENV -eq 1 ]
then
    echo "Run tests in system environment..."
elif [ -d venv ]
then
    VENV=$(realpath venv)
    echo "Using existing virtualenv ($VENV)..."
else
    # Create a temporary venv
    VENV=$(mktemp -d)
    echo "Create a temporary virtualenv in $VENV..."
    TEMP_VENV=1
    python3 -m venv "$VENV"
fi

if [ -n "$VENV" ]
then
    echo "Install package with dev dependencies using pip in virtualenv..."
    "$VENV/bin/python3" -m pip install -e ".[dev]" $QUIET_ARG
    source "$VENV/bin/activate"
fi

# Run pre-commit; remember its result so cleanup still happens first
RES=0
echo "Run pre-commit..."
pre-commit run --all-files
[ $? -ne 0 ] && RES=1

# Clean temporary venv (quoted: mktemp paths could contain spaces)
[ $TEMP_VENV -eq 1 ] && rm -fr "$VENV"

exit $RES
|
346
tests/test_config.py
Normal file
346
tests/test_config.py
Normal file
|
@ -0,0 +1,346 @@
|
|||
# pylint: disable=redefined-outer-name,missing-function-docstring,protected-access,global-statement
|
||||
# pylint: disable=global-variable-not-assigned
|
||||
""" Tests on config lib """
|
||||
|
||||
import configparser
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
from mylib.config import BooleanOption, Config, ConfigSection, StringOption
|
||||
|
||||
tested = {}
|
||||
|
||||
|
||||
def test_config_init_default_args():
    """Config() without optional args exposes the default version/encoding."""
    app_name = "Test app"
    cfg = Config(app_name)
    assert cfg.appname == app_name
    assert cfg.version == "0.0"
    assert cfg.encoding == "utf-8"


def test_config_init_custom_args():
    """Config() keeps the version and encoding it was given."""
    custom = {"version": "1.43", "encoding": "ISO-8859-1"}
    cfg = Config("Test app", **custom)
    assert cfg.appname == "Test app"
    assert cfg.version == custom["version"]
    assert cfg.encoding == custom["encoding"]
|
||||
|
||||
|
||||
def test_add_section_default_args():
    """add_section() without kwargs yields a bare section with order 10."""
    cfg = Config("Test app")
    section_name = "test_section"
    created = cfg.add_section(section_name)
    assert isinstance(created, ConfigSection)
    assert cfg.sections[section_name] == created
    assert created.name == section_name
    assert created.comment is None
    # 10 is the default ordering weight
    assert created.order == 10


def test_add_section_custom_args():
    """add_section() keeps the comment and order it was given."""
    cfg = Config("Test app")
    section_name = "test_section"
    section_comment = "Test"
    section_order = 20
    created = cfg.add_section(section_name, comment=section_comment, order=section_order)
    assert isinstance(created, ConfigSection)
    assert created.name == section_name
    assert created.comment == section_comment
    assert created.order == section_order
|
||||
|
||||
|
||||
def test_add_section_with_callback():
    """loaded_callback is executed exactly once when the config gets loaded."""
    config = Config("Test app")
    name = "test_section"

    global tested
    tested["test_add_section_with_callback"] = False

    def test_callback(loaded_config):
        # Record the call; asserting the flag is still False guarantees the
        # callback runs at most once
        global tested
        assert loaded_config == config
        assert tested["test_add_section_with_callback"] is False
        tested["test_add_section_with_callback"] = True

    section = config.add_section(name, loaded_callback=test_callback)
    assert isinstance(section, ConfigSection)
    assert test_callback in config._loaded_callbacks
    # Not executed yet: the configuration has not been loaded
    assert tested["test_add_section_with_callback"] is False

    config.parse_arguments_options(argv=[], create=False)
    assert tested["test_add_section_with_callback"] is True
    assert test_callback in config._loaded_callbacks_executed
    # Try to execute again to verify callback is not tested again
    config._loaded()


def test_add_section_with_callback_already_loaded():
    """A callback registered after load time is executed immediately."""
    config = Config("Test app")
    name = "test_section"
    config.parse_arguments_options(argv=[], create=False)

    global tested
    tested["test_add_section_with_callback_already_loaded"] = False

    def test_callback(loaded_config):
        # Record the call; asserting the flag is still False guarantees the
        # callback runs at most once
        global tested
        assert loaded_config == config
        assert tested["test_add_section_with_callback_already_loaded"] is False
        tested["test_add_section_with_callback_already_loaded"] = True

    section = config.add_section(name, loaded_callback=test_callback)
    assert isinstance(section, ConfigSection)
    # Config was already loaded, so the callback ran during add_section()
    assert tested["test_add_section_with_callback_already_loaded"] is True
    assert test_callback in config._loaded_callbacks
    assert test_callback in config._loaded_callbacks_executed
    # Try to execute again to verify callback is not tested again
    config._loaded()
|
||||
|
||||
|
||||
def test_add_option_default_args():
    """add_option() without kwargs creates an option with all-default attributes."""
    cfg = Config("Test app")
    parent = cfg.add_section("my_section")
    assert isinstance(parent, ConfigSection)
    opt_name = "my_option"
    created = parent.add_option(StringOption, opt_name)
    assert isinstance(created, StringOption)
    assert opt_name in parent.options and parent.options[opt_name] == created
    assert created.config == cfg
    assert created.section == parent
    assert created.name == opt_name
    # Every optional attribute defaults to None, except no_arg (False)
    for attr in ("default", "comment", "arg", "short_arg", "arg_help"):
        assert getattr(created, attr) is None
    assert created.no_arg is False


def test_add_option_custom_args():
    """add_option() keeps every keyword attribute it was given."""
    cfg = Config("Test app")
    parent = cfg.add_section("my_section")
    assert isinstance(parent, ConfigSection)
    opt_name = "my_option"
    custom_attrs = {
        "default": "default value",
        "comment": "my comment",
        "no_arg": True,
        "arg": "--my-option",
        "short_arg": "-M",
        "arg_help": "My help",
    }
    created = parent.add_option(StringOption, opt_name, **custom_attrs)
    assert isinstance(created, StringOption)
    assert opt_name in parent.options and parent.options[opt_name] == created

    assert created.config == cfg
    assert created.section == parent
    assert created.name == opt_name
    for attr_name, expected in custom_attrs.items():
        assert getattr(created, attr_name) == expected
|
||||
|
||||
|
||||
def test_defined():
    """defined() is False until the option is registered in its section."""
    cfg = Config("Test app")
    section_name = "my_section"
    opt_name = "my_option"
    assert not cfg.defined(section_name, opt_name)

    new_section = cfg.add_section("my_section")
    assert isinstance(new_section, ConfigSection)
    new_section.add_option(StringOption, opt_name)

    assert cfg.defined(section_name, opt_name)


def test_isset():
    """isset() turns True only once a value is provided (here via CLI args)."""
    cfg = Config("Test app")
    section_name = "my_section"
    opt_name = "my_option"
    assert not cfg.isset(section_name, opt_name)

    new_section = cfg.add_section("my_section")
    assert isinstance(new_section, ConfigSection)
    registered = new_section.add_option(StringOption, opt_name)

    # Registering alone does not set a value
    assert not cfg.isset(section_name, opt_name)

    cfg.parse_arguments_options(argv=[registered.parser_argument_name, "value"], create=False)

    assert cfg.isset(section_name, opt_name)


def test_not_isset():
    """isset() stays False when parsing arguments that do not set the option."""
    cfg = Config("Test app")
    section_name = "my_section"
    opt_name = "my_option"
    assert not cfg.isset(section_name, opt_name)

    new_section = cfg.add_section("my_section")
    assert isinstance(new_section, ConfigSection)
    new_section.add_option(StringOption, opt_name)

    assert not cfg.isset(section_name, opt_name)

    cfg.parse_arguments_options(argv=[], create=False)

    assert not cfg.isset(section_name, opt_name)
|
||||
|
||||
|
||||
def test_get():
    """get() returns the value provided on the command line."""
    cfg = Config("Test app")
    section_name = "my_section"
    opt_name = "my_option"
    expected_value = "value"
    new_section = cfg.add_section("my_section")
    registered = new_section.add_option(StringOption, opt_name)
    cfg.parse_arguments_options(
        argv=[registered.parser_argument_name, expected_value], create=False
    )

    assert cfg.get(section_name, opt_name) == expected_value


def test_get_default():
    """get() falls back to the option default when no value was set."""
    cfg = Config("Test app")
    section_name = "my_section"
    opt_name = "my_option"
    fallback = "value"
    new_section = cfg.add_section("my_section")
    new_section.add_option(StringOption, opt_name, default=fallback)
    cfg.parse_arguments_options(argv=[], create=False)

    assert cfg.get(section_name, opt_name) == fallback
|
||||
|
||||
|
||||
def test_logging_splited_stdout_stderr(capsys):
    """With -C -v, info messages go to stdout only and errors to stderr only."""
    cfg = Config("Test app")
    cfg.parse_arguments_options(argv=["-C", "-v"], create=False)
    info_msg = "[info]"
    err_msg = "[error]"
    root_logger = logging.getLogger()
    root_logger.info(info_msg)
    root_logger.error(err_msg)
    captured = capsys.readouterr()
    # Each stream must carry its own level and nothing from the other
    assert info_msg in captured.out and info_msg not in captured.err
    assert err_msg in captured.err and err_msg not in captured.out
|
||||
|
||||
|
||||
#
|
||||
# Test option types
|
||||
#
|
||||
|
||||
|
||||
@pytest.fixture()
def config_with_file(tmpdir):
    """Provide a Config already saved to a config.ini inside a temp directory."""
    cfg = Config("Test app")
    ini_file = tmpdir.mkdir("config").join("config.ini")
    cfg.save(os.path.join(ini_file.dirname, ini_file.basename))
    return cfg
|
||||
|
||||
|
||||
def generate_mock_input(expected_prompt, input_value):
    """Build a fake _get_user_input method: verify the prompt, return a canned answer."""

    def _mocked_get_user_input(self, prompt):  # pylint: disable=unused-argument
        # The option must ask exactly the expected question
        assert prompt == expected_prompt
        return input_value

    return _mocked_get_user_input
|
||||
|
||||
|
||||
# Boolean option
|
||||
|
||||
|
||||
def test_boolean_option_from_config(config_with_file):
    """_from_config reflects a non-default value; reading an unset one raises."""
    section = config_with_file.add_section("test")
    default = True
    option = section.add_option(BooleanOption, "test_bool", default=default)
    config_with_file.save()

    # After set(), the value parsed back from the config reflects the change
    option.set(not default)
    assert option._from_config is not default

    # Setting the default back leaves the option unset in the config file,
    # so reading it from the file raises configparser.NoOptionError
    option.set(default)
    assert not option._isset_in_config_file
    with pytest.raises(configparser.NoOptionError):
        assert option._from_config is default
|
||||
|
||||
|
||||
def test_boolean_option_ask_value(mocker):
    """ask_value() maps y/Y/""/n/N user answers to the expected boolean."""
    config = Config("Test app")
    section = config.add_section("test")
    name = "test_bool"
    option = section.add_option(BooleanOption, name, default=True)

    # An empty answer accepts the default (True here, hence the [Y/n] prompt)
    for answer, expected in (("y", True), ("Y", True), ("", True), ("n", False), ("N", False)):
        mocker.patch(
            "mylib.config.BooleanOption._get_user_input",
            generate_mock_input(f"{name}: [Y/n] ", answer),
        )
        assert option.ask_value(set_it=False) is expected
|
||||
|
||||
|
||||
def test_boolean_option_to_config():
    """to_config() serializes booleans as lowercase strings."""
    cfg = Config("Test app")
    bool_option = cfg.add_section("test").add_option(BooleanOption, "test_bool", default=True)
    for raw_value, serialized in ((True, "true"), (False, "false")):
        assert bool_option.to_config(raw_value) == serialized
|
||||
|
||||
|
||||
def test_boolean_option_export_to_config(config_with_file):
    """export_to_config() comments the line out while the value is the default."""
    section = config_with_file.add_section("test")
    name = "test_bool"
    comment = "Test boolean"
    default = True

    option = section.add_option(BooleanOption, name, default=default, comment=comment)

    # Unset option: the exported assignment line is commented out
    assert (
        option.export_to_config()
        == f"""# {comment}
# Default: {str(default).lower()}
# {name} =
"""
    )

    # Non-default value: the exported assignment line is active
    option.set(not default)
    assert (
        option.export_to_config()
        == f"""# {comment}
# Default: {str(default).lower()}
{name} = {str(not default).lower()}
"""
    )

    # Back to the default: the exported line is commented out again
    option.set(default)
    assert (
        option.export_to_config()
        == f"""# {comment}
# Default: {str(default).lower()}
# {name} =
"""
    )
|
498
tests/test_mysql.py
Normal file
498
tests/test_mysql.py
Normal file
|
@ -0,0 +1,498 @@
|
|||
# pylint: disable=redefined-outer-name,missing-function-docstring,protected-access
|
||||
""" Tests on opening hours helpers """
|
||||
|
||||
import pytest
|
||||
from MySQLdb._exceptions import Error
|
||||
|
||||
from mylib.mysql import MyDB
|
||||
|
||||
|
||||
class FakeMySQLdbCursor:
    """Fake MySQLdb cursor checking executed statements against expectations."""

    def __init__(
        self, expected_sql, expected_params, expected_return, expected_just_try, expected_exception
    ):
        # Exact SQL text and params the next execute() call must receive
        self.expected_sql = expected_sql
        self.expected_params = expected_params
        # Canned value returned by execute()/fetchall()
        self.expected_return = expected_return
        # When True, only statements starting with "select " may be executed
        self.expected_just_try = expected_just_try
        # When True, execute() raises a MySQLdb Error instead of running
        self.expected_exception = expected_exception

    def execute(self, sql, params=None):
        """Validate the statement and return the canned result.

        Raises a MySQLdb Error when expected_exception is set; fails the test
        when a non-SELECT statement runs in just-try mode or when the SQL or
        params differ from the expected ones.
        """
        if self.expected_exception:
            raise Error(f"{self}.execute({sql}, {params}): expected exception")
        if self.expected_just_try and not sql.lower().startswith("select "):
            assert False, f"{self}.execute({sql}, {params}) may not be executed in just try mode"
        # pylint: disable=consider-using-f-string
        assert (
            sql == self.expected_sql
        ), "%s.execute(): Invalid SQL query:\n '%s'\nMay be:\n '%s'" % (
            self,
            sql,
            self.expected_sql,
        )
        # pylint: disable=consider-using-f-string
        assert (
            params == self.expected_params
        ), "%s.execute(): Invalid params:\n %s\nMay be:\n %s" % (
            self,
            params,
            self.expected_params,
        )
        return self.expected_return

    @property
    def description(self):
        """Mimic cursor.description: one 4-tuple per field of the first row."""
        assert self.expected_return
        assert isinstance(self.expected_return, list)
        assert isinstance(self.expected_return[0], dict)
        return [(field, 1, 2, 3) for field in self.expected_return[0].keys()]

    def fetchall(self):
        """Return expected rows as sequences of values (dict rows unwrapped)."""
        if isinstance(self.expected_return, list):
            # NOTE(review): this yields a generator, not a list — callers in
            # MyDB.doSelect apparently only iterate it; confirm if reused.
            return (
                list(row.values()) if isinstance(row, dict) else row for row in self.expected_return
            )
        return self.expected_return

    def __repr__(self):
        return (
            f"FakeMySQLdbCursor({self.expected_sql}, {self.expected_params}, "
            f"{self.expected_return}, {self.expected_just_try})"
        )
|
||||
|
||||
|
||||
class FakeMySQLdb:
    """Fake MySQLdb connection forwarding expectations to its cursors."""

    # Expectations handed to every cursor created by this connection
    expected_sql = None
    expected_params = None
    expected_return = True
    expected_just_try = False
    expected_exception = False
    # When True, commit()/rollback() must never be reached
    just_try = False

    def __init__(self, **kwargs):
        """Accept only the connect() keyword arguments MyDB is expected to pass."""
        allowed_kwargs = {
            "db": str,
            "user": str,
            # NOTE(review): None here is a value, not a type — isinstance()
            # would raise TypeError for a non-str passwd; works because the
            # tests always pass a str. Confirm intent.
            "passwd": (str, None),
            "host": str,
            "charset": str,
            "use_unicode": bool,
        }
        for arg, value in kwargs.items():
            assert arg in allowed_kwargs, f'Invalid arg {arg}="{value}"'
            assert isinstance(
                value, allowed_kwargs[arg]
            ), f"Arg {arg} not a {allowed_kwargs[arg]} ({type(value)})"
            setattr(self, arg, value)

    def close(self):
        # Mirrors the canned return value so tests can assert on it
        return self.expected_return

    def cursor(self):
        """Create a fake cursor inheriting this connection's expectations."""
        return FakeMySQLdbCursor(
            self.expected_sql,
            self.expected_params,
            self.expected_return,
            self.expected_just_try or self.just_try,
            self.expected_exception,
        )

    def commit(self):
        self._check_just_try()
        return self.expected_return

    def rollback(self):
        self._check_just_try()
        return self.expected_return

    def _check_just_try(self):
        # Writes must never be committed or rolled back in just-try mode
        if self.just_try:
            assert False, "May not be executed in just try mode"
|
||||
|
||||
|
||||
def fake_mysqldb_connect(**kwargs):
    """Stand-in for MySQLdb.connect() returning a fake connection."""
    return FakeMySQLdb(**kwargs)


def fake_mysqldb_connect_just_try(**kwargs):
    """Like fake_mysqldb_connect(), but with just-try mode enabled."""
    connection = FakeMySQLdb(**kwargs)
    connection.just_try = True
    return connection
|
||||
|
||||
|
||||
@pytest.fixture
def test_mydb():
    """Plain (unconnected) MyDB instance."""
    return MyDB("127.0.0.1", "user", "password", "dbname")


@pytest.fixture
def fake_mydb(mocker):
    """MyDB whose MySQLdb.connect is replaced by the fake connection factory."""
    mocker.patch("MySQLdb.connect", fake_mysqldb_connect)
    return MyDB("127.0.0.1", "user", "password", "dbname")


@pytest.fixture
def fake_just_try_mydb(mocker):
    """Fake MyDB in just-try mode (write statements must not execute)."""
    mocker.patch("MySQLdb.connect", fake_mysqldb_connect_just_try)
    return MyDB("127.0.0.1", "user", "password", "dbname", just_try=True)


@pytest.fixture
def fake_connected_mydb(fake_mydb):
    """fake_mydb, already connected."""
    fake_mydb.connect()
    return fake_mydb


@pytest.fixture
def fake_connected_just_try_mydb(fake_just_try_mydb):
    """fake_just_try_mydb, already connected."""
    fake_just_try_mydb.connect()
    return fake_just_try_mydb
|
||||
|
||||
|
||||
def generate_mock_args(expected_args=(), expected_kwargs=None, expected_return=True):
    """Build a mock callable asserting how it is invoked.

    Args:
        expected_args: exact positional arguments the mock must receive.
        expected_kwargs: exact keyword arguments the mock must receive
            (None means "no keyword arguments expected").
        expected_return: canned value returned on a successful call.

    Returns:
        A callable usable as a mocker.patch() replacement.
    """
    # Replace the former `expected_kwargs={}` mutable default (silenced with a
    # pylint dangerous-default-value disable) with None-normalization. This is
    # backward compatible: the mock receives **kwargs, which is always a dict,
    # so an explicit None expectation could never have matched anyway.
    if expected_kwargs is None:
        expected_kwargs = {}

    def mock_args(*args, **kwargs):
        # pylint: disable=consider-using-f-string
        assert args == expected_args, "Invalid call args:\n %s\nMay be:\n %s" % (
            args,
            expected_args,
        )
        # pylint: disable=consider-using-f-string
        assert kwargs == expected_kwargs, "Invalid call kwargs:\n %s\nMay be:\n %s" % (
            kwargs,
            expected_kwargs,
        )
        return expected_return

    return mock_args
|
||||
|
||||
|
||||
def mock_doSQL_just_try(self, sql, params=None):  # pylint: disable=unused-argument
    """Replacement for MyDB.doSQL that fails the test if it is ever called."""
    assert False, "doSQL() may not be executed in just try mode"
|
||||
|
||||
|
||||
def generate_mock_doSQL(
    expected_sql, expected_params={}, expected_return=True
):  # pylint: disable=dangerous-default-value
    """Build a fake MyDB.doSQL/doSelect asserting the generated SQL and params.

    The {} default is deliberate (and never mutated): an explicit
    expected_params=None means the statement must be called without params
    (see test_truncate), so None cannot be used as the "unset" sentinel here.
    """

    def mock_doSQL(self, sql, params=None):  # pylint: disable=unused-argument
        # pylint: disable=consider-using-f-string
        assert sql == expected_sql, "Invalid generated SQL query:\n '%s'\nMay be:\n '%s'" % (
            sql,
            expected_sql,
        )
        # pylint: disable=consider-using-f-string
        assert params == expected_params, "Invalid generated params:\n %s\nMay be:\n %s" % (
            params,
            expected_params,
        )
        return expected_return

    return mock_doSQL
|
||||
|
||||
|
||||
# MyDB.doSelect() has the same expected parameters as MyDB.doSQL(), so the
# same mock generator and just-try guard can be reused for both methods.
generate_mock_doSelect = generate_mock_doSQL
mock_doSelect_just_try = mock_doSQL_just_try
|
||||
|
||||
#
|
||||
# Test on MyDB helper methods
|
||||
#
|
||||
|
||||
|
||||
def test_combine_params_with_to_add_parameter():
    """_combine_params() merges an extra params dict into the base one."""
    merged = MyDB._combine_params({"test1": 1}, {"test2": 2})
    assert merged == {"test1": 1, "test2": 2}


def test_combine_params_with_kargs():
    """_combine_params() merges keyword arguments into the base dict."""
    merged = MyDB._combine_params({"test1": 1}, test2=2)
    assert merged == {"test1": 1, "test2": 2}


def test_combine_params_with_kargs_and_to_add_parameter():
    """_combine_params() merges both an extra dict and keyword arguments."""
    merged = MyDB._combine_params({"test1": 1}, {"test2": 2}, test3=3)
    assert merged == {"test1": 1, "test2": 2, "test3": 3}
|
||||
|
||||
|
||||
def test_format_where_clauses_params_are_preserved():
    """A (sql, params) pair passes through _format_where_clauses() untouched."""
    clause_and_params = ("test = test", {"test1": 1})
    assert MyDB._format_where_clauses(*clause_and_params) == clause_and_params


def test_format_where_clauses_raw():
    """A raw SQL string comes back with an empty params dict."""
    assert MyDB._format_where_clauses("test = test") == ("test = test", {})


def test_format_where_clauses_tuple_clause_with_params():
    """A single (sql, params) tuple argument is preserved as-is."""
    clause = ("test1 = %(test1)s AND test2 = %(test2)s", {"test1": 1, "test2": 2})
    assert MyDB._format_where_clauses(clause) == clause


def test_format_where_clauses_with_list_as_value():
    """A list value expands to an IN clause with one named param per element."""
    formatted = MyDB._format_where_clauses({"test": [1, 2]})
    assert formatted == (
        "`test` IN (%(test_0)s, %(test_1)s)",
        {"test_0": 1, "test_1": 2},
    )


def test_format_where_clauses_dict():
    """A dict becomes ANDed equality tests on backquoted fields."""
    clauses = {"test1": 1, "test2": 2}
    assert MyDB._format_where_clauses(clauses) == (
        "`test1` = %(test1)s AND `test2` = %(test2)s",
        clauses,
    )


def test_format_where_clauses_combined_types():
    """Raw strings, tuples and dicts may be freely mixed in one call."""
    clauses = ("test1 = 1", ("test2 LIKE %(test2)s", {"test2": 2}), {"test3": 3, "test4": 4})
    assert MyDB._format_where_clauses(clauses) == (
        "test1 = 1 AND test2 LIKE %(test2)s AND `test3` = %(test3)s AND `test4` = %(test4)s",
        {"test2": 2, "test3": 3, "test4": 4},
    )


def test_format_where_clauses_with_where_op():
    """where_op switches the operator joining the clauses."""
    clauses = {"test1": 1, "test2": 2}
    assert MyDB._format_where_clauses(clauses, where_op="OR") == (
        "`test1` = %(test1)s OR `test2` = %(test2)s",
        clauses,
    )
|
||||
|
||||
|
||||
def test_add_where_clauses():
    """_add_where_clauses() appends a WHERE clause and returns its params."""
    base_sql = "SELECT * FROM table"
    clauses = {"test1": 1, "test2": 2}
    assert MyDB._add_where_clauses(base_sql, None, clauses) == (
        base_sql + " WHERE `test1` = %(test1)s AND `test2` = %(test2)s",
        clauses,
    )


def test_add_where_clauses_preserved_params():
    """Existing query params survive the merge with the WHERE-clause params."""
    base_sql = "SELECT * FROM table"
    clauses = {"test1": 1, "test2": 2}
    existing_params = {"fake1": 1}
    assert MyDB._add_where_clauses(base_sql, existing_params.copy(), clauses) == (
        base_sql + " WHERE `test1` = %(test1)s AND `test2` = %(test2)s",
        {**clauses, **existing_params},
    )


def test_add_where_clauses_with_op():
    """where_op is honored when joining raw clauses."""
    base_sql = "SELECT * FROM table"
    raw_clauses = ("test1=1", "test2=2")
    assert MyDB._add_where_clauses(base_sql, None, raw_clauses, where_op="OR") == (
        base_sql + " WHERE test1=1 OR test2=2",
        {},
    )


def test_add_where_clauses_with_duplicated_field():
    """A WHERE field colliding with a query param gets a suffixed param name."""
    base_sql = "UPDATE table SET test1=%(test1)s"
    existing_params = {"test1": "new_value"}
    clauses = {"test1": "where_value"}
    assert MyDB._add_where_clauses(base_sql, existing_params, clauses) == (
        base_sql + " WHERE `test1` = %(test1_1)s",
        {"test1": "new_value", "test1_1": "where_value"},
    )


def test_quote_table_name():
    """Table names are backquoted; schema-qualified names per component."""
    assert MyDB._quote_table_name("mytable") == "`mytable`"
    assert MyDB._quote_table_name("myschema.mytable") == "`myschema`.`mytable`"
|
||||
|
||||
|
||||
def test_insert(mocker, test_mydb):
    """insert() generates the expected INSERT statement and params."""
    values = {"test1": 1, "test2": 2}
    mocker.patch(
        "mylib.mysql.MyDB.doSQL",
        generate_mock_doSQL(
            "INSERT INTO `mytable` (`test1`, `test2`) VALUES (%(test1)s, %(test2)s)", values
        ),
    )

    assert test_mydb.insert("mytable", values)


def test_insert_just_try(mocker, test_mydb):
    """insert(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.mysql.MyDB.doSQL", mock_doSQL_just_try)
    assert test_mydb.insert("mytable", {"test1": 1, "test2": 2}, just_try=True)


def test_update(mocker, test_mydb):
    """update() generates SET and WHERE clauses with merged params."""
    values = {"test1": 1, "test2": 2}
    where_clauses = {"test3": 3, "test4": 4}
    mocker.patch(
        "mylib.mysql.MyDB.doSQL",
        generate_mock_doSQL(
            "UPDATE `mytable` SET `test1` = %(test1)s, `test2` = %(test2)s WHERE `test3` ="
            " %(test3)s AND `test4` = %(test4)s",
            {**values, **where_clauses},
        ),
    )

    assert test_mydb.update("mytable", values, where_clauses)


def test_update_just_try(mocker, test_mydb):
    """update(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.mysql.MyDB.doSQL", mock_doSQL_just_try)
    assert test_mydb.update("mytable", {"test1": 1, "test2": 2}, None, just_try=True)


def test_delete(mocker, test_mydb):
    """delete() generates the expected DELETE statement and params."""
    where_clauses = {"test1": 1, "test2": 2}
    mocker.patch(
        "mylib.mysql.MyDB.doSQL",
        generate_mock_doSQL(
            "DELETE FROM `mytable` WHERE `test1` = %(test1)s AND `test2` = %(test2)s", where_clauses
        ),
    )

    assert test_mydb.delete("mytable", where_clauses)


def test_delete_just_try(mocker, test_mydb):
    """delete(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.mysql.MyDB.doSQL", mock_doSQL_just_try)
    assert test_mydb.delete("mytable", None, just_try=True)


def test_truncate(mocker, test_mydb):
    """truncate() generates a TRUNCATE TABLE statement without params."""
    mocker.patch("mylib.mysql.MyDB.doSQL", generate_mock_doSQL("TRUNCATE TABLE `mytable`", None))

    assert test_mydb.truncate("mytable")


def test_truncate_just_try(mocker, test_mydb):
    """truncate(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.mysql.MyDB.doSQL", mock_doSelect_just_try)
    assert test_mydb.truncate("mytable", just_try=True)
|
||||
|
||||
|
||||
def test_select(mocker, test_mydb):
    """select() builds field list, WHERE, ORDER BY and LIMIT clauses."""
    fields = ("field1", "field2")
    where_clauses = {"test3": 3, "test4": 4}
    expected_return = [
        {"field1": 1, "field2": 2},
        {"field1": 2, "field2": 3},
    ]
    order_by = "field1, DESC"
    limit = 10
    mocker.patch(
        "mylib.mysql.MyDB.doSelect",
        generate_mock_doSQL(
            "SELECT `field1`, `field2` FROM `mytable` WHERE `test3` = %(test3)s AND `test4` ="
            " %(test4)s ORDER BY " + order_by + " LIMIT " + str(limit),  # nosec: B608
            where_clauses,
            expected_return,
        ),
    )

    assert (
        test_mydb.select("mytable", where_clauses, fields, order_by=order_by, limit=limit)
        == expected_return
    )


def test_select_without_field_and_order_by(mocker, test_mydb):
    """select() with only a table name generates SELECT * without clauses."""
    mocker.patch("mylib.mysql.MyDB.doSelect", generate_mock_doSQL("SELECT * FROM `mytable`"))

    assert test_mydb.select("mytable")


def test_select_just_try(mocker, test_mydb):
    """select(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.mysql.MyDB.doSQL", mock_doSelect_just_try)
    assert test_mydb.select("mytable", None, None, just_try=True)
|
||||
|
||||
|
||||
#
|
||||
# Tests on main methods
|
||||
#
|
||||
|
||||
|
||||
def test_connect(mocker, test_mydb):
    """connect() passes the constructor credentials to MySQLdb.connect()."""
    expected_kwargs = {
        "db": test_mydb._db,
        "user": test_mydb._user,
        "host": test_mydb._host,
        "passwd": test_mydb._pwd,
        "charset": test_mydb._charset,
        "use_unicode": True,
    }

    mocker.patch("MySQLdb.connect", generate_mock_args(expected_kwargs=expected_kwargs))

    assert test_mydb.connect()


def test_close(fake_mydb):
    """close() on a never-connected MyDB returns None."""
    assert fake_mydb.close() is None


def test_close_connected(fake_connected_mydb):
    """close() on a connected MyDB also returns None."""
    assert fake_connected_mydb.close() is None
|
||||
|
||||
|
||||
def test_doSQL(fake_connected_mydb):
    """doSQL() forwards the statement and params to the cursor unchanged."""
    fake_connected_mydb._conn.expected_sql = "DELETE FROM table WHERE test1 = %(test1)s"
    fake_connected_mydb._conn.expected_params = {"test1": 1}
    fake_connected_mydb.doSQL(
        fake_connected_mydb._conn.expected_sql, fake_connected_mydb._conn.expected_params
    )


def test_doSQL_without_params(fake_connected_mydb):
    """doSQL() also works with a statement and no params."""
    fake_connected_mydb._conn.expected_sql = "DELETE FROM table"
    fake_connected_mydb.doSQL(fake_connected_mydb._conn.expected_sql)


def test_doSQL_just_try(fake_connected_just_try_mydb):
    """In just-try mode, doSQL() reports success without executing the write."""
    assert fake_connected_just_try_mydb.doSQL("DELETE FROM table")


def test_doSQL_on_exception(fake_connected_mydb):
    """doSQL() returns False when the underlying cursor raises."""
    fake_connected_mydb._conn.expected_exception = True
    assert fake_connected_mydb.doSQL("DELETE FROM table") is False
|
||||
|
||||
|
||||
def test_doSelect(fake_connected_mydb):
    """doSelect() forwards SQL/params and returns the rows as dicts."""
    fake_connected_mydb._conn.expected_sql = "SELECT * FROM table WHERE test1 = %(test1)s"
    fake_connected_mydb._conn.expected_params = {"test1": 1}
    fake_connected_mydb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_mydb.doSelect(
            fake_connected_mydb._conn.expected_sql, fake_connected_mydb._conn.expected_params
        )
        == fake_connected_mydb._conn.expected_return
    )


def test_doSelect_without_params(fake_connected_mydb):
    """doSelect() also works with a statement and no params."""
    fake_connected_mydb._conn.expected_sql = "SELECT * FROM table"
    fake_connected_mydb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_mydb.doSelect(fake_connected_mydb._conn.expected_sql)
        == fake_connected_mydb._conn.expected_return
    )


def test_doSelect_on_exception(fake_connected_mydb):
    """doSelect() returns False when the underlying cursor raises."""
    fake_connected_mydb._conn.expected_exception = True
    assert fake_connected_mydb.doSelect("SELECT * FROM table") is False


def test_doSelect_just_try(fake_connected_just_try_mydb):
    """SELECT statements are still executed in just-try mode."""
    fake_connected_just_try_mydb._conn.expected_sql = "SELECT * FROM table WHERE test1 = %(test1)s"
    fake_connected_just_try_mydb._conn.expected_params = {"test1": 1}
    fake_connected_just_try_mydb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_just_try_mydb.doSelect(
            fake_connected_just_try_mydb._conn.expected_sql,
            fake_connected_just_try_mydb._conn.expected_params,
        )
        == fake_connected_just_try_mydb._conn.expected_return
    )
|
685
tests/test_opening_hours.py
Normal file
685
tests/test_opening_hours.py
Normal file
|
@ -0,0 +1,685 @@
|
|||
# pylint: disable=missing-function-docstring
|
||||
""" Tests on opening hours helpers """
|
||||
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from mylib import opening_hours
|
||||
|
||||
#
|
||||
# Test on parse_exceptional_closures()
|
||||
#
|
||||
|
||||
|
||||
def test_parse_exceptional_closures_one_day_without_time_period():
    """A lone date yields that single day with no hours restriction."""
    parsed = opening_hours.parse_exceptional_closures(["22/09/2017"])
    assert parsed == [{"days": [datetime.date(2017, 9, 22)], "hours_periods": []}]


def test_parse_exceptional_closures_one_day_with_time_period():
    """A date followed by one time range yields a single hours period."""
    parsed = opening_hours.parse_exceptional_closures(["26/11/2017 9h30-12h30"])
    assert parsed == [
        {
            "days": [datetime.date(2017, 11, 26)],
            "hours_periods": [{"start": datetime.time(9, 30), "stop": datetime.time(12, 30)}],
        }
    ]


def test_parse_exceptional_closures_one_day_with_multiple_time_periods():
    """Several time ranges after a date all attach to that day."""
    parsed = opening_hours.parse_exceptional_closures(["26/11/2017 9h30-12h30 14h-18h"])
    assert parsed == [
        {
            "days": [datetime.date(2017, 11, 26)],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 30)},
                {"start": datetime.time(14, 0), "stop": datetime.time(18, 0)},
            ],
        }
    ]
|
||||
|
||||
|
||||
def test_parse_exceptional_closures_full_days_period():
    """A DD/MM/YYYY-DD/MM/YYYY range expands to every day it contains."""
    assert opening_hours.parse_exceptional_closures(["20/09/2017-22/09/2017"]) == [
        {
            "days": [
                datetime.date(2017, 9, 20),
                datetime.date(2017, 9, 21),
                datetime.date(2017, 9, 22),
            ],
            "hours_periods": [],
        }
    ]


def test_parse_exceptional_closures_invalid_days_period():
    """A days range whose start is after its end is rejected."""
    with pytest.raises(ValueError):
        opening_hours.parse_exceptional_closures(["22/09/2017-21/09/2017"])


def test_parse_exceptional_closures_days_period_with_time_period():
    """A days range combined with a time range applies the hours to every day."""
    assert opening_hours.parse_exceptional_closures(["20/09/2017-22/09/2017 9h-12h"]) == [
        {
            "days": [
                datetime.date(2017, 9, 20),
                datetime.date(2017, 9, 21),
                datetime.date(2017, 9, 22),
            ],
            "hours_periods": [{"start": datetime.time(9, 0), "stop": datetime.time(12, 0)}],
        }
    ]


def test_parse_exceptional_closures_time_period_without_days():
    """A time range alone (no day) is not a valid exceptional closure."""
    with pytest.raises(ValueError):
        opening_hours.parse_exceptional_closures(["9h-12h"])


def test_parse_exceptional_closures_invalid_time_period():
    """A time range whose start is after its end is rejected."""
    with pytest.raises(ValueError):
        opening_hours.parse_exceptional_closures(["20/09/2017 9h-8h"])


def test_parse_exceptional_closures_multiple_periods():
    """Several day specs and several time ranges combine into one closure."""
    assert opening_hours.parse_exceptional_closures(
        ["20/09/2017 25/11/2017-26/11/2017 9h30-12h30 14h-18h"]
    ) == [
        {
            "days": [
                datetime.date(2017, 9, 20),
                datetime.date(2017, 11, 25),
                datetime.date(2017, 11, 26),
            ],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 30)},
                {"start": datetime.time(14, 0), "stop": datetime.time(18, 0)},
            ],
        }
    ]
|
||||
|
||||
|
||||
#
|
||||
# Tests on parse_normal_opening_hours()
|
||||
#
|
||||
|
||||
|
||||
def test_parse_normal_opening_hours_one_day():
    """A single weekday name parses to that day with no hour periods."""
    result = opening_hours.parse_normal_opening_hours(["jeudi"])
    assert result == [{"days": ["jeudi"], "hours_periods": []}]


def test_parse_normal_opening_hours_multiple_days():
    """Several space-separated weekday names are all collected."""
    result = opening_hours.parse_normal_opening_hours(["lundi jeudi"])
    assert result == [{"days": ["lundi", "jeudi"], "hours_periods": []}]
|
||||
|
||||
|
||||
def test_parse_normal_opening_hours_invalid_day():
    """An unknown weekday name is rejected by parse_normal_opening_hours().

    Fixed: this test previously called parse_exceptional_closures(), so it
    never exercised the normal-opening-hours parser its name refers to.
    """
    with pytest.raises(ValueError):
        opening_hours.parse_normal_opening_hours(["invalid"])
|
||||
|
||||
|
||||
def test_parse_normal_opening_hours_one_days_period():
    """A weekday range expands to every weekday it covers."""
    result = opening_hours.parse_normal_opening_hours(["lundi-jeudi"])
    assert result == [{"days": ["lundi", "mardi", "mercredi", "jeudi"], "hours_periods": []}]


def test_parse_normal_opening_hours_one_day_with_one_time_period():
    """A weekday may carry an hour period."""
    result = opening_hours.parse_normal_opening_hours(["jeudi 9h-12h"])
    assert result == [
        {
            "days": ["jeudi"],
            "hours_periods": [{"start": datetime.time(9, 0), "stop": datetime.time(12, 0)}],
        }
    ]
|
||||
|
||||
|
||||
def test_parse_normal_opening_hours_invalid_days_period():
    """Reversed or malformed weekday ranges are rejected."""
    with pytest.raises(ValueError):
        opening_hours.parse_normal_opening_hours(["jeudi-mardi"])
    with pytest.raises(ValueError):
        opening_hours.parse_normal_opening_hours(["lundi-mardi-mercredi"])


def test_parse_normal_opening_hours_one_time_period():
    """An hour period with no day applies to an empty day list."""
    result = opening_hours.parse_normal_opening_hours(["9h-18h30"])
    assert result == [
        {
            "days": [],
            "hours_periods": [{"start": datetime.time(9, 0), "stop": datetime.time(18, 30)}],
        }
    ]
|
||||
|
||||
|
||||
def test_parse_normal_opening_hours_invalid_time_period():
    """A reversed hour period is rejected."""
    with pytest.raises(ValueError):
        opening_hours.parse_normal_opening_hours(["12h-10h"])


def test_parse_normal_opening_hours_multiple_periods():
    """Each input entry yields its own {days, hours_periods} mapping."""
    expected = [
        {
            "days": ["lundi", "mardi", "mercredi", "jeudi", "vendredi"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 30)},
                {"start": datetime.time(14, 0), "stop": datetime.time(18, 0)},
            ],
        },
        {
            "days": ["samedi"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(18, 0)},
            ],
        },
        {
            "days": ["dimanche"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 0)},
            ],
        },
    ]
    assert opening_hours.parse_normal_opening_hours(
        ["lundi-vendredi 9h30-12h30 14h-18h", "samedi 9h30-18h", "dimanche 9h30-12h"]
    ) == expected
|
||||
|
||||
|
||||
def test_parse_normal_opening_hours_is_sorted():
    """Entries and their hour periods come back sorted, not in input order."""
    expected = [
        {
            "days": ["lundi", "mardi", "mercredi", "jeudi", "vendredi"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 30)},
                {"start": datetime.time(14, 0), "stop": datetime.time(18, 0)},
            ],
        },
        {
            "days": ["samedi"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 0)},
            ],
        },
        {
            "days": ["samedi"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(18, 0)},
            ],
        },
        {
            "days": ["dimanche"],
            "hours_periods": [
                {"start": datetime.time(9, 30), "stop": datetime.time(12, 0)},
            ],
        },
    ]
    assert opening_hours.parse_normal_opening_hours(
        [
            "samedi 9h30-18h",
            "lundi-vendredi 14h-18h 9h30-12h30",
            "samedi 9h30-12h",
            "dimanche 9h30-12h",
        ]
    ) == expected
|
||||
|
||||
|
||||
#
# Tests on normal opening hours
#
# Weekly schedule shared by the its_normally_open() tests below.
normal_opening_hours = [
    "lundi-mardi jeudi 9h30-12h30 14h-16h30",
    "mercredi vendredi 9h30-12h30 14h-17h",
    "samedi",
]
# 2024-03-01 is a Friday at 10:15 -> inside the 9h30-12h30 period.
normally_opened_datetime = datetime.datetime(2024, 3, 1, 10, 15)
# 2024-04-06 is a Saturday -> listed with no hour restriction.
normally_opened_all_day_datetime = datetime.datetime(2024, 4, 6, 10, 15)
# 2017-03-01 is a Wednesday at 20:15 -> outside every hour period.
normally_closed_datetime = datetime.datetime(2017, 3, 1, 20, 15)
# 2024-04-07 is a Sunday -> not listed in the schedule at all.
normally_closed_all_day_datetime = datetime.datetime(2024, 4, 7, 20, 15)
|
||||
|
||||
|
||||
def test_its_normally_open():
    """A Friday morning inside the 9h30-12h30 period is open."""
    assert opening_hours.its_normally_open(normal_opening_hours, when=normally_opened_datetime)


def test_its_normally_open_all_day():
    """Saturday is listed without hours, so any time that day is open."""
    assert opening_hours.its_normally_open(
        normal_opening_hours, when=normally_opened_all_day_datetime
    )


def test_its_normally_closed():
    """20:15 lies outside every period of the listed weekday."""
    assert not opening_hours.its_normally_open(normal_opening_hours, when=normally_closed_datetime)


def test_its_normally_closed_all_day():
    """Sunday is absent from the schedule, so the whole day is closed."""
    assert not opening_hours.its_normally_open(
        normal_opening_hours, when=normally_closed_all_day_datetime
    )


def test_its_normally_open_ignore_time():
    """ignore_time=True: a date whose weekday is listed counts as open."""
    assert opening_hours.its_normally_open(
        normal_opening_hours, when=normally_closed_datetime.date(), ignore_time=True
    )


def test_its_normally_closed_ignore_time():
    """ignore_time=True: an unlisted weekday is still closed."""
    assert not opening_hours.its_normally_open(
        normal_opening_hours, when=normally_closed_all_day_datetime.date(), ignore_time=True
    )
|
||||
|
||||
|
||||
#
# Tests on non working days
#
# Configured non-working holidays ("1mai" is deliberately absent so the
# "not included" test below has a real holiday to probe).
nonworking_public_holidays = [
    "1janvier",
    "paques",
    "lundi_paques",
    "8mai",
    "jeudi_ascension",
    "lundi_pentecote",
    "14juillet",
    "15aout",
    "1novembre",
    "11novembre",
    "noel",
]
# 2017-01-01: listed holiday ("1janvier").
nonworking_date = datetime.date(2017, 1, 1)
# 2017-05-01: a holiday, but intentionally missing from the list above.
not_included_nonworking_date = datetime.date(2017, 5, 1)
# 2017-05-02: an ordinary day.
not_nonworking_date = datetime.date(2017, 5, 2)
|
||||
|
||||
|
||||
def test_its_nonworking_day():
    """A listed holiday date is reported as a non-working day."""
    result = opening_hours.its_nonworking_day(nonworking_public_holidays, date=nonworking_date)
    assert result is True


def test_its_not_nonworking_day():
    """An ordinary date is not a non-working day."""
    result = opening_hours.its_nonworking_day(
        nonworking_public_holidays,
        date=not_nonworking_date,
    )
    assert result is False


def test_its_not_included_nonworking_day():
    """A holiday missing from the configured list is not non-working."""
    result = opening_hours.its_nonworking_day(
        nonworking_public_holidays,
        date=not_included_nonworking_date,
    )
    assert result is False
|
||||
|
||||
|
||||
#
# Tests in exceptional closures
#
exceptional_closures = [
    "22/09/2017",
    "20/09/2017-22/09/2017",
    "20/09/2017-22/09/2017 18/09/2017",
    "25/11/2017",
    "26/11/2017 9h30-12h30",
    "27/11/2017 17h-18h 9h30-12h30",
]
# 22/09/2017 is closed with no hour restriction (full-day closure).
exceptional_closure_all_day_date = datetime.date(2017, 9, 22)
exceptional_closure_all_day_datetime = datetime.datetime.combine(
    exceptional_closure_all_day_date, datetime.time(20, 15)
)
# 26/11/2017 10:30 falls inside the 9h30-12h30 partial closure.
exceptional_closure_datetime = datetime.datetime(2017, 11, 26, 10, 30)
exceptional_closure_datetime_hours_period = {
    "start": datetime.time(9, 30),
    "stop": datetime.time(12, 30),
}
# Same day/month as above but a year with no configured closure.
not_exceptional_closure_date = datetime.date(2019, 9, 22)
|
||||
|
||||
|
||||
def test_its_exceptionally_closed():
    """A datetime inside a full-day closure is exceptionally closed."""
    result = opening_hours.its_exceptionally_closed(
        exceptional_closures, when=exceptional_closure_all_day_datetime
    )
    assert result is True


def test_its_not_exceptionally_closed():
    """A date with no configured closure is not exceptionally closed."""
    result = opening_hours.its_exceptionally_closed(
        exceptional_closures, when=not_exceptional_closure_date
    )
    assert result is False


def test_its_exceptionally_closed_all_day():
    """all_day=True matches a closure covering the whole day."""
    result = opening_hours.its_exceptionally_closed(
        exceptional_closures, when=exceptional_closure_all_day_datetime, all_day=True
    )
    assert result is True


def test_its_not_exceptionally_closed_all_day():
    """all_day=True does not match a closure limited to some hours."""
    result = opening_hours.its_exceptionally_closed(
        exceptional_closures, when=exceptional_closure_datetime, all_day=True
    )
    assert result is False


def test_get_exceptional_closures_hours():
    """A partial closure reports its configured hour period."""
    result = opening_hours.get_exceptional_closures_hours(
        exceptional_closures, date=exceptional_closure_datetime.date()
    )
    assert result == [exceptional_closure_datetime_hours_period]


def test_get_exceptional_closures_hours_all_day():
    """A full-day closure reports a period spanning the whole day."""
    result = opening_hours.get_exceptional_closures_hours(
        exceptional_closures, date=exceptional_closure_all_day_date
    )
    assert result == [
        {"start": datetime.datetime.min.time(), "stop": datetime.datetime.max.time()}
    ]


def test_get_exceptional_closures_hours_is_sorted():
    """Hour periods are returned sorted by start time, not input order."""
    result = opening_hours.get_exceptional_closures_hours(
        ["27/11/2017 17h-18h 9h30-12h30"], date=datetime.date(2017, 11, 27)
    )
    assert result == [
        {"start": datetime.time(9, 30), "stop": datetime.time(12, 30)},
        {"start": datetime.time(17, 0), "stop": datetime.time(18, 0)},
    ]
|
||||
|
||||
|
||||
#
|
||||
# Tests on is_closed
|
||||
#
|
||||
|
||||
|
||||
def test_is_closed_when_normaly_closed_by_hour():
    """Closed by the normal schedule: 20:15 is outside every period."""
    result = opening_hours.is_closed(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2017, 5, 1, 20, 15),
    )
    assert result == {
        "closed": True,
        "exceptional_closure": False,
        "exceptional_closure_all_day": False,
    }


def test_is_closed_on_exceptional_closure_full_day():
    """A datetime inside a full-day exceptional closure."""
    result = opening_hours.is_closed(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2017, 9, 22, 14, 15),
    )
    assert result == {
        "closed": True,
        "exceptional_closure": True,
        "exceptional_closure_all_day": True,
    }


def test_is_closed_on_exceptional_closure_day():
    """A datetime inside an hours-limited exceptional closure."""
    result = opening_hours.is_closed(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2017, 11, 26, 10, 30),
    )
    assert result == {
        "closed": True,
        "exceptional_closure": True,
        "exceptional_closure_all_day": False,
    }


def test_is_closed_on_nonworking_public_holidays():
    """A public holiday is closed, but not flagged as exceptional closure."""
    result = opening_hours.is_closed(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2017, 1, 1, 10, 30),
    )
    assert result == {
        "closed": True,
        "exceptional_closure": False,
        "exceptional_closure_all_day": False,
    }


def test_is_closed_when_normaly_closed_by_day():
    """A weekday absent from the normal schedule is closed."""
    result = opening_hours.is_closed(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2017, 5, 7, 14, 15),
    )
    assert result == {
        "closed": True,
        "exceptional_closure": False,
        "exceptional_closure_all_day": False,
    }


def test_is_closed_when_normaly_opened():
    """A moment inside the normal schedule is open."""
    result = opening_hours.is_closed(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2017, 5, 2, 15, 15),
    )
    assert result == {
        "closed": False,
        "exceptional_closure": False,
        "exceptional_closure_all_day": False,
    }
|
||||
|
||||
|
||||
def test_easter_date():
    """easter_date() matches the known Easter Sundays for 2010-2021."""
    known_easters = {
        2010: datetime.date(2010, 4, 4),
        2011: datetime.date(2011, 4, 24),
        2012: datetime.date(2012, 4, 8),
        2013: datetime.date(2013, 3, 31),
        2014: datetime.date(2014, 4, 20),
        2015: datetime.date(2015, 4, 5),
        2016: datetime.date(2016, 3, 27),
        2017: datetime.date(2017, 4, 16),
        2018: datetime.date(2018, 4, 1),
        2019: datetime.date(2019, 4, 21),
        2020: datetime.date(2020, 4, 12),
        2021: datetime.date(2021, 4, 4),
    }
    for year, expected in known_easters.items():
        assert opening_hours.easter_date(year) == expected
||||
|
||||
|
||||
def test_nonworking_french_public_days_of_the_year():
    """The full 2021 name -> date holiday mapping is computed."""
    expected = {
        "1janvier": datetime.date(2021, 1, 1),
        "paques": datetime.date(2021, 4, 4),
        "lundi_paques": datetime.date(2021, 4, 5),
        "1mai": datetime.date(2021, 5, 1),
        "8mai": datetime.date(2021, 5, 8),
        "jeudi_ascension": datetime.date(2021, 5, 13),
        "pentecote": datetime.date(2021, 5, 23),
        "lundi_pentecote": datetime.date(2021, 5, 24),
        "14juillet": datetime.date(2021, 7, 14),
        "15aout": datetime.date(2021, 8, 15),
        "1novembre": datetime.date(2021, 11, 1),
        "11novembre": datetime.date(2021, 11, 11),
        "noel": datetime.date(2021, 12, 25),
        "saint_etienne": datetime.date(2021, 12, 26),
    }
    assert opening_hours.nonworking_french_public_days_of_the_year(2021) == expected
|
||||
|
||||
|
||||
def test_next_opening_date():
    """From a closed Sunday, the next opening day is found."""
    result = opening_hours.next_opening_date(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        date=datetime.date(2021, 4, 4),
    )
    assert result == datetime.date(2021, 4, 6)


def test_next_opening_hour():
    """The next opening moment is the first period start of that day."""
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=normal_opening_hours,
        exceptional_closures_values=exceptional_closures,
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 4, 10, 30),
    )
    assert result == datetime.datetime(2021, 4, 6, 9, 30)
|
||||
|
||||
|
||||
def test_next_opening_hour_with_exceptionnal_closure_hours():
    """Partial closure hours on the next open day push the opening later."""
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=["06/04/2021 9h-13h 14h-16h"],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 4, 10, 30),
    )
    assert result == datetime.datetime(2021, 4, 6, 16, 0)


def test_next_opening_hour_with_exceptionnal_closure_day():
    """A full-day exceptional closure shifts the opening to the day after."""
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=["06/04/2021"],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 4, 10, 30),
    )
    assert result == datetime.datetime(2021, 4, 7, 9, 0)


def test_next_opening_hour_with_overlapsed_opening_hours():
    """With overlapping schedules, the earliest non-closed opening wins."""
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h", "mardi 8h-19h"],
        exceptional_closures_values=["06/04/2021 9h-13h 14h-16h"],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 4, 10, 30),
    )
    assert result == datetime.datetime(2021, 4, 6, 8, 0)
|
||||
|
||||
|
||||
def test_next_opening_hour_with_too_large_exceptionnal_closure_days():
    """False is returned when no opening exists within max_anaylse_days."""
    # NOTE(review): "16-04/2021" looks like a typo for "16/04/2021" — confirm
    # whether the parser tolerates it or the closure is meant to be malformed.
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=["06/04/2021-16-04/2021"],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 4, 10, 30),
        max_anaylse_days=10,
    )
    assert result is False
|
||||
|
||||
|
||||
def test_next_opening_hour_on_opened_moment():
    """An already-open moment is returned unchanged."""
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 6, 10, 30),
    )
    assert result == datetime.datetime(2021, 4, 6, 10, 30)


def test_next_opening_hour_on_same_day():
    """Before a later period -> its start; inside a period -> unchanged."""
    cases = [
        (
            ["lundi-vendredi 9h-12h 14h-18h"],
            datetime.datetime(2021, 4, 6, 13, 0),
            datetime.datetime(2021, 4, 6, 14, 0),
        ),
        (
            ["lundi-vendredi 9h-12h 14h-18h"],
            datetime.datetime(2021, 4, 6, 16, 0),
            datetime.datetime(2021, 4, 6, 16, 0),
        ),
        (
            ["lundi-vendredi"],
            datetime.datetime(2021, 4, 6, 16, 0),
            datetime.datetime(2021, 4, 6, 16, 0),
        ),
    ]
    for hours, when, expected in cases:
        assert (
            opening_hours.next_opening_hour(
                normal_opening_hours_values=hours,
                exceptional_closures_values=[],
                nonworking_public_holidays_values=nonworking_public_holidays,
                when=when,
            )
            == expected
        )


def test_next_opening_hour_on_opened_day_but_too_late():
    """After the last period of an open day, the next day's start is used."""
    result = opening_hours.next_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2021, 4, 6, 23, 0),
    )
    assert result == datetime.datetime(2021, 4, 7, 9, 0)
|
||||
|
||||
|
||||
def test_previous_opening_date():
    """From a non-opening Monday, the previous opening day is the Friday."""
    result = opening_hours.previous_opening_date(
        normal_opening_hours_values=["lundi-vendredi 9h-18h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        date=datetime.date(2024, 4, 1),
    )
    assert result == datetime.date(2024, 3, 29)


def test_previous_opening_hour():
    """The previous opening moment is the end of the last open period."""
    result = opening_hours.previous_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-18h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2024, 4, 1, 10, 30),
    )
    assert result == datetime.datetime(2024, 3, 29, 18, 0)


def test_previous_opening_hour_with_exceptionnal_closure_hours():
    """Exceptional closure hours truncate the previous open period."""
    cases = [
        (["29/03/2024 14h-18h"], datetime.datetime(2024, 3, 29, 12, 0)),
        (["29/03/2024 16h-18h"], datetime.datetime(2024, 3, 29, 16, 0)),
    ]
    for closures, expected in cases:
        assert (
            opening_hours.previous_opening_hour(
                normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
                exceptional_closures_values=closures,
                nonworking_public_holidays_values=nonworking_public_holidays,
                when=datetime.datetime(2024, 4, 1, 10, 30),
            )
            == expected
        )
|
||||
|
||||
|
||||
def test_previous_opening_hour_with_exceptionnal_closure_day():
    """A full-day exceptional closure pushes the result one day earlier."""
    result = opening_hours.previous_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=["29/03/2024"],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2024, 4, 1, 10, 30),
    )
    assert result == datetime.datetime(2024, 3, 28, 18, 0)


def test_previous_opening_hour_with_overlapsed_opening_hours():
    """With overlapping schedules, the latest closing time wins."""
    result = opening_hours.previous_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h", "mardi 8h-19h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2024, 4, 3, 8, 30),
    )
    assert result == datetime.datetime(2024, 4, 2, 19, 0)
|
||||
|
||||
|
||||
def test_previous_opening_hour_with_too_large_exceptionnal_closure_days():
    """False is returned when no opening exists within max_anaylse_days."""
    # NOTE(review): "16-04/2024" looks like a typo for "16/04/2024" — confirm intent.
    result = opening_hours.previous_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=["06/03/2024-16-04/2024"],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2024, 4, 17, 8, 30),
        max_anaylse_days=10,
    )
    assert result is False
|
||||
|
||||
|
||||
def test_previous_opening_hour_on_opened_moment():
    """An already-open moment is returned unchanged."""
    result = opening_hours.previous_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2024, 4, 5, 10, 30),
    )
    assert result == datetime.datetime(2024, 4, 5, 10, 30)


def test_previous_opening_hour_on_same_day():
    """Between periods -> previous period's end; inside -> unchanged."""
    cases = [
        (
            ["lundi-vendredi 9h-12h 14h-18h"],
            datetime.datetime(2024, 4, 5, 13, 0),
            datetime.datetime(2024, 4, 5, 12, 0),
        ),
        (
            ["lundi-vendredi 9h-12h 14h-18h"],
            datetime.datetime(2024, 4, 5, 16, 0),
            datetime.datetime(2024, 4, 5, 16, 0),
        ),
        (
            ["lundi-vendredi"],
            datetime.datetime(2024, 4, 5, 16, 0),
            datetime.datetime(2024, 4, 5, 16, 0),
        ),
    ]
    for hours, when, expected in cases:
        assert (
            opening_hours.previous_opening_hour(
                normal_opening_hours_values=hours,
                exceptional_closures_values=[],
                nonworking_public_holidays_values=nonworking_public_holidays,
                when=when,
            )
            == expected
        )


def test_previous_opening_hour_on_opened_day_but_too_early():
    """Before the first period of an open day, the previous day's end is used."""
    result = opening_hours.previous_opening_hour(
        normal_opening_hours_values=["lundi-vendredi 9h-12h 14h-18h"],
        exceptional_closures_values=[],
        nonworking_public_holidays_values=nonworking_public_holidays,
        when=datetime.datetime(2024, 4, 5, 8, 0),
    )
    assert result == datetime.datetime(2024, 4, 4, 18, 0)
|
490
tests/test_oracle.py
Normal file
490
tests/test_oracle.py
Normal file
|
@ -0,0 +1,490 @@
|
|||
# pylint: disable=redefined-outer-name,missing-function-docstring,protected-access
|
||||
""" Tests on opening hours helpers """
|
||||
|
||||
import cx_Oracle
|
||||
import pytest
|
||||
|
||||
from mylib.oracle import OracleDB
|
||||
|
||||
|
||||
class FakeCXOracleCursor:
    """Fake cx_Oracle cursor recording expectations and canned results."""

    def __init__(
        self, expected_sql, expected_params, expected_return, expected_just_try, expected_exception
    ):
        self.expected_sql = expected_sql
        self.expected_params = expected_params
        self.expected_return = expected_return
        self.expected_just_try = expected_just_try
        self.expected_exception = expected_exception
        self.opened = True

    def execute(self, sql, **params):
        """Check the statement and params against expectations, return canned rows."""
        assert self.opened
        if self.expected_exception:
            raise cx_Oracle.Error(f"{self}.execute({sql}, {params}): expected exception")
        if self.expected_just_try and not sql.lower().startswith("select "):
            assert False, f"{self}.execute({sql}, {params}) may not be executed in just try mode"
        # pylint: disable=consider-using-f-string
        sql_error = "%s.execute(): Invalid SQL query:\n '%s'\nMay be:\n '%s'" % (
            self,
            sql,
            self.expected_sql,
        )
        assert sql == self.expected_sql, sql_error
        # pylint: disable=consider-using-f-string
        params_error = "%s.execute(): Invalid params:\n %s\nMay be:\n %s" % (
            self,
            params,
            self.expected_params,
        )
        assert params == self.expected_params, params_error
        return self.expected_return

    def fetchall(self):
        """Return the canned result set (cursor must still be open)."""
        assert self.opened
        return self.expected_return

    def __enter__(self):
        self.opened = True
        return self

    def __exit__(self, *args):
        self.opened = False

    def __repr__(self):
        return (
            f"FakeCXOracleCursor({self.expected_sql}, {self.expected_params}, "
            f"{self.expected_return}, {self.expected_just_try})"
        )
|
||||
|
||||
|
||||
class FakeCXOracle:
    """Fake cx_Oracle connection with class-level expectation knobs."""

    expected_sql = None
    expected_params = {}
    expected_return = True
    expected_just_try = False
    expected_exception = False
    just_try = False

    def __init__(self, **kwargs):
        # password may legitimately be None. Fixed: the allowed-types tuple
        # previously contained None itself, which makes isinstance() raise
        # TypeError instead of failing the assertion — use type(None).
        allowed_kwargs = {"dsn": str, "user": str, "password": (str, type(None))}
        for arg, value in kwargs.items():
            assert arg in allowed_kwargs, f"Invalid arg {arg}='{value}'"
            assert isinstance(
                value, allowed_kwargs[arg]
            ), f"Arg {arg} not a {allowed_kwargs[arg]} ({type(value)})"
            setattr(self, arg, value)

    def close(self):
        return self.expected_return

    def cursor(self):
        """Return a fake cursor wired with this connection's expectations."""
        return FakeCXOracleCursor(
            self.expected_sql,
            self.expected_params,
            self.expected_return,
            self.expected_just_try or self.just_try,
            self.expected_exception,
        )

    def commit(self):
        self._check_just_try()
        return self.expected_return

    def rollback(self):
        self._check_just_try()
        return self.expected_return

    def _check_just_try(self):
        # Any write-side call is forbidden while in just-try mode.
        if self.just_try:
            assert False, "May not be executed in just try mode"
|
||||
|
||||
|
||||
def fake_cxoracle_connect(**kwargs):
    """Stand-in for cx_Oracle.connect() returning a fake connection."""
    return FakeCXOracle(**kwargs)


def fake_cxoracle_connect_just_try(**kwargs):
    """Like fake_cxoracle_connect(), but the connection is in just-try mode."""
    connection = FakeCXOracle(**kwargs)
    connection.just_try = True
    return connection
|
||||
|
||||
|
||||
@pytest.fixture
def test_oracledb():
    """OracleDB instance with unpatched cx_Oracle (never actually connects)."""
    return OracleDB("127.0.0.1/dbname", "user", "password")


@pytest.fixture
def fake_oracledb(mocker):
    """OracleDB whose cx_Oracle.connect is patched to return a FakeCXOracle."""
    mocker.patch("cx_Oracle.connect", fake_cxoracle_connect)
    return OracleDB("127.0.0.1/dbname", "user", "password")


@pytest.fixture
def fake_just_try_oracledb(mocker):
    """Patched OracleDB in just_try mode (no write statement may execute)."""
    mocker.patch("cx_Oracle.connect", fake_cxoracle_connect_just_try)
    return OracleDB("127.0.0.1/dbname", "user", "password", just_try=True)


@pytest.fixture
def fake_connected_oracledb(fake_oracledb):
    """fake_oracledb, already connected."""
    fake_oracledb.connect()
    return fake_oracledb


@pytest.fixture
def fake_connected_just_try_oracledb(fake_just_try_oracledb):
    """fake_just_try_oracledb, already connected."""
    fake_just_try_oracledb.connect()
    return fake_just_try_oracledb
|
||||
|
||||
|
||||
def generate_mock_args(
    expected_args=(), expected_kwargs={}, expected_return=True
):  # pylint: disable=dangerous-default-value
    """Build a callable asserting it is invoked with exactly the expected args."""

    def mocked_function(*args, **kwargs):
        # pylint: disable=consider-using-f-string
        assert args == expected_args, "Invalid call args:\n %s\nMay be:\n %s" % (
            args,
            expected_args,
        )
        # pylint: disable=consider-using-f-string
        assert kwargs == expected_kwargs, "Invalid call kwargs:\n %s\nMay be:\n %s" % (
            kwargs,
            expected_kwargs,
        )
        return expected_return

    return mocked_function
|
||||
|
||||
|
||||
def mock_doSQL_just_try(self, sql, params=None):  # pylint: disable=unused-argument
    """doSQL() replacement that fails the test if it is ever called."""
    assert False, "doSQL() may not be executed in just try mode"
|
||||
|
||||
|
||||
def generate_mock_doSQL(
    expected_sql, expected_params={}, expected_return=True
):  # pylint: disable=dangerous-default-value
    """Build a doSQL() replacement asserting the generated SQL and params."""

    def mocked_dosql(self, sql, params=None):  # pylint: disable=unused-argument
        # pylint: disable=consider-using-f-string
        assert sql == expected_sql, "Invalid generated SQL query:\n '%s'\nMay be:\n '%s'" % (
            sql,
            expected_sql,
        )
        # pylint: disable=consider-using-f-string
        assert params == expected_params, "Invalid generated params:\n %s\nMay be:\n %s" % (
            params,
            expected_params,
        )
        return expected_return

    return mocked_dosql
|
||||
|
||||
|
||||
# OracleDB.doSelect() have same expected parameters as OracleDB.doSQL(),
# so the doSQL mock builders are reused under doSelect names.
generate_mock_doSelect = generate_mock_doSQL
mock_doSelect_just_try = mock_doSQL_just_try
|
||||
|
||||
#
|
||||
# Test on OracleDB helper methods
|
||||
#
|
||||
|
||||
|
||||
def test_combine_params_with_to_add_parameter():
    """Two param dicts are merged."""
    result = OracleDB._combine_params({"test1": 1}, {"test2": 2})
    assert result == {"test1": 1, "test2": 2}


def test_combine_params_with_kargs():
    """Keyword arguments are merged into the params."""
    result = OracleDB._combine_params({"test1": 1}, test2=2)
    assert result == {"test1": 1, "test2": 2}


def test_combine_params_with_kargs_and_to_add_parameter():
    """A dict and keyword arguments merge together."""
    result = OracleDB._combine_params({"test1": 1}, {"test2": 2}, test3=3)
    assert result == {"test1": 1, "test2": 2, "test3": 3}
|
||||
|
||||
|
||||
def test_format_where_clauses_params_are_preserved():
    """A (clause, params) pair passes through untouched."""
    args = ("test = test", {"test1": 1})
    assert OracleDB._format_where_clauses(*args) == args


def test_format_where_clauses_raw():
    """A bare string clause yields empty params."""
    assert OracleDB._format_where_clauses("test = test") == ("test = test", {})


def test_format_where_clauses_tuple_clause_with_params():
    """A (clause, params) tuple is returned unchanged."""
    where_clauses = ("test1 = :test1 AND test2 = :test2", {"test1": 1, "test2": 2})
    assert OracleDB._format_where_clauses(where_clauses) == where_clauses


def test_format_where_clauses_with_list_as_value():
    """A list value expands to an IN clause with one bind per element."""
    result = OracleDB._format_where_clauses({"test": [1, 2]})
    assert result == ('"test" IN (:test_0, :test_1)', {"test_0": 1, "test_1": 2})


def test_format_where_clauses_dict():
    """A dict becomes ANDed equality clauses with quoted field names."""
    where_clauses = {"test1": 1, "test2": 2}
    result = OracleDB._format_where_clauses(where_clauses)
    assert result == ('"test1" = :test1 AND "test2" = :test2', where_clauses)


def test_format_where_clauses_combined_types():
    """Raw strings, tuples and dicts can be mixed in one clause list."""
    where_clauses = ("test1 = 1", ("test2 LIKE :test2", {"test2": 2}), {"test3": 3, "test4": 4})
    result = OracleDB._format_where_clauses(where_clauses)
    assert result == (
        'test1 = 1 AND test2 LIKE :test2 AND "test3" = :test3 AND "test4" = :test4',
        {"test2": 2, "test3": 3, "test4": 4},
    )


def test_format_where_clauses_with_where_op():
    """where_op joins the clauses with the given operator instead of AND."""
    where_clauses = {"test1": 1, "test2": 2}
    result = OracleDB._format_where_clauses(where_clauses, where_op="OR")
    assert result == ('"test1" = :test1 OR "test2" = :test2', where_clauses)
|
||||
|
||||
|
||||
def test_add_where_clauses():
    """WHERE is appended with quoted fields and named binds."""
    sql = "SELECT * FROM table"
    where_clauses = {"test1": 1, "test2": 2}
    result = OracleDB._add_where_clauses(sql, None, where_clauses)
    assert result == (sql + ' WHERE "test1" = :test1 AND "test2" = :test2', where_clauses)


def test_add_where_clauses_preserved_params():
    """Pre-existing query params survive the merge with WHERE params."""
    sql = "SELECT * FROM table"
    where_clauses = {"test1": 1, "test2": 2}
    params = {"fake1": 1}
    result = OracleDB._add_where_clauses(sql, params.copy(), where_clauses)
    assert result == (
        sql + ' WHERE "test1" = :test1 AND "test2" = :test2',
        {**where_clauses, **params},
    )


def test_add_where_clauses_with_op():
    """where_op joins raw clauses with the given operator."""
    sql = "SELECT * FROM table"
    where_clauses = ("test1=1", "test2=2")
    result = OracleDB._add_where_clauses(sql, None, where_clauses, where_op="OR")
    assert result == (sql + " WHERE test1=1 OR test2=2", {})


def test_add_where_clauses_with_duplicated_field():
    """A WHERE field colliding with a SET param gets a suffixed bind name."""
    sql = "UPDATE table SET test1=:test1"
    params = {"test1": "new_value"}
    where_clauses = {"test1": "where_value"}
    result = OracleDB._add_where_clauses(sql, params, where_clauses)
    assert result == (
        sql + ' WHERE "test1" = :test1_1',
        {"test1": "new_value", "test1_1": "where_value"},
    )
|
||||
|
||||
|
||||
def test_quote_table_name():
    """Table names, optionally schema-qualified, are double-quoted."""
    assert OracleDB._quote_table_name("mytable") == '"mytable"'
    assert OracleDB._quote_table_name("myschema.mytable") == '"myschema"."mytable"'
||||
|
||||
|
||||
def test_insert(mocker, test_oracledb):
    """insert() generates the expected INSERT statement and parameters."""
    values = {"test1": 1, "test2": 2}
    mocker.patch(
        "mylib.oracle.OracleDB.doSQL",
        generate_mock_doSQL(
            'INSERT INTO "mytable" ("test1", "test2") VALUES (:test1, :test2)', values
        ),
    )

    assert test_oracledb.insert("mytable", values)


def test_insert_just_try(mocker, test_oracledb):
    """insert(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.oracle.OracleDB.doSQL", mock_doSQL_just_try)
    assert test_oracledb.insert("mytable", {"test1": 1, "test2": 2}, just_try=True)


def test_update(mocker, test_oracledb):
    """update() generates the expected UPDATE statement and parameters."""
    values = {"test1": 1, "test2": 2}
    where_clauses = {"test3": 3, "test4": 4}
    mocker.patch(
        "mylib.oracle.OracleDB.doSQL",
        generate_mock_doSQL(
            'UPDATE "mytable" SET "test1" = :test1, "test2" = :test2 WHERE "test3" = :test3 AND'
            ' "test4" = :test4',
            {**values, **where_clauses},
        ),
    )

    assert test_oracledb.update("mytable", values, where_clauses)


def test_update_just_try(mocker, test_oracledb):
    """update(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.oracle.OracleDB.doSQL", mock_doSQL_just_try)
    assert test_oracledb.update("mytable", {"test1": 1, "test2": 2}, None, just_try=True)


def test_delete(mocker, test_oracledb):
    """delete() generates the expected DELETE statement and parameters."""
    where_clauses = {"test1": 1, "test2": 2}
    mocker.patch(
        "mylib.oracle.OracleDB.doSQL",
        generate_mock_doSQL(
            'DELETE FROM "mytable" WHERE "test1" = :test1 AND "test2" = :test2', where_clauses
        ),
    )

    assert test_oracledb.delete("mytable", where_clauses)


def test_delete_just_try(mocker, test_oracledb):
    """delete(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.oracle.OracleDB.doSQL", mock_doSQL_just_try)
    assert test_oracledb.delete("mytable", None, just_try=True)


def test_truncate(mocker, test_oracledb):
    """truncate() generates the expected TRUNCATE statement."""
    mocker.patch(
        "mylib.oracle.OracleDB.doSQL", generate_mock_doSQL('TRUNCATE TABLE "mytable"', None)
    )

    assert test_oracledb.truncate("mytable")


def test_truncate_just_try(mocker, test_oracledb):
    """truncate(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.oracle.OracleDB.doSQL", mock_doSelect_just_try)
    assert test_oracledb.truncate("mytable", just_try=True)
|
||||
|
||||
|
||||
def test_select(mocker, test_oracledb):
    """select() generates the expected SELECT with fields, WHERE, ORDER BY and LIMIT."""
    fields = ("field1", "field2")
    where_clauses = {"test3": 3, "test4": 4}
    expected_return = [
        {"field1": 1, "field2": 2},
        {"field1": 2, "field2": 3},
    ]
    order_by = "field1, DESC"
    limit = 10
    mocker.patch(
        "mylib.oracle.OracleDB.doSelect",
        generate_mock_doSQL(
            'SELECT "field1", "field2" FROM "mytable" WHERE "test3" = :test3 AND "test4" = :test4'
            " ORDER BY " + order_by + " LIMIT " + str(limit),  # nosec: B608
            where_clauses,
            expected_return,
        ),
    )

    assert (
        test_oracledb.select("mytable", where_clauses, fields, order_by=order_by, limit=limit)
        == expected_return
    )


def test_select_without_field_and_order_by(mocker, test_oracledb):
    """select() with defaults generates a plain SELECT *."""
    mocker.patch("mylib.oracle.OracleDB.doSelect", generate_mock_doSQL('SELECT * FROM "mytable"'))

    assert test_oracledb.select("mytable")


def test_select_just_try(mocker, test_oracledb):
    """select(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.oracle.OracleDB.doSQL", mock_doSelect_just_try)
    assert test_oracledb.select("mytable", None, None, just_try=True)
|
||||
|
||||
|
||||
#
|
||||
# Tests on main methods
|
||||
#
|
||||
|
||||
|
||||
def test_connect(mocker, test_oracledb):
    """connect() passes dsn/user/password to cx_Oracle.connect()."""
    expected_kwargs = {
        "dsn": test_oracledb._dsn,
        "user": test_oracledb._user,
        "password": test_oracledb._pwd,
    }

    mocker.patch("cx_Oracle.connect", generate_mock_args(expected_kwargs=expected_kwargs))

    assert test_oracledb.connect()


def test_close(fake_oracledb):
    """close() on a non-connected object returns None."""
    assert fake_oracledb.close() is None


def test_close_connected(fake_connected_oracledb):
    """close() on a connected object returns None."""
    assert fake_connected_oracledb.close() is None
|
||||
|
||||
|
||||
def test_doSQL(fake_connected_oracledb):
    """doSQL() executes the statement with its bound parameters."""
    fake_connected_oracledb._conn.expected_sql = "DELETE FROM table WHERE test1 = :test1"
    fake_connected_oracledb._conn.expected_params = {"test1": 1}
    fake_connected_oracledb.doSQL(
        fake_connected_oracledb._conn.expected_sql, fake_connected_oracledb._conn.expected_params
    )


def test_doSQL_without_params(fake_connected_oracledb):
    """doSQL() works without bound parameters."""
    fake_connected_oracledb._conn.expected_sql = "DELETE FROM table"
    fake_connected_oracledb.doSQL(fake_connected_oracledb._conn.expected_sql)


def test_doSQL_just_try(fake_connected_just_try_oracledb):
    """In just-try mode doSQL() reports success without executing anything."""
    assert fake_connected_just_try_oracledb.doSQL("DELETE FROM table")


def test_doSQL_on_exception(fake_connected_oracledb):
    """doSQL() returns False when the underlying driver raises."""
    fake_connected_oracledb._conn.expected_exception = True
    assert fake_connected_oracledb.doSQL("DELETE FROM table") is False
|
||||
|
||||
|
||||
def test_doSelect(fake_connected_oracledb):
    """doSelect() returns the rows fetched for the statement."""
    fake_connected_oracledb._conn.expected_sql = "SELECT * FROM table WHERE test1 = :test1"
    fake_connected_oracledb._conn.expected_params = {"test1": 1}
    fake_connected_oracledb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_oracledb.doSelect(
            fake_connected_oracledb._conn.expected_sql,
            fake_connected_oracledb._conn.expected_params,
        )
        == fake_connected_oracledb._conn.expected_return
    )


def test_doSelect_without_params(fake_connected_oracledb):
    """doSelect() works without bound parameters."""
    fake_connected_oracledb._conn.expected_sql = "SELECT * FROM table"
    fake_connected_oracledb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_oracledb.doSelect(fake_connected_oracledb._conn.expected_sql)
        == fake_connected_oracledb._conn.expected_return
    )


def test_doSelect_on_exception(fake_connected_oracledb):
    """doSelect() returns False when the underlying driver raises."""
    fake_connected_oracledb._conn.expected_exception = True
    assert fake_connected_oracledb.doSelect("SELECT * FROM table") is False


def test_doSelect_just_try(fake_connected_just_try_oracledb):
    """SELECT statements are still executed in just-try mode."""
    fake_connected_just_try_oracledb._conn.expected_sql = "SELECT * FROM table WHERE test1 = :test1"
    fake_connected_just_try_oracledb._conn.expected_params = {"test1": 1}
    fake_connected_just_try_oracledb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_just_try_oracledb.doSelect(
            fake_connected_just_try_oracledb._conn.expected_sql,
            fake_connected_just_try_oracledb._conn.expected_params,
        )
        == fake_connected_just_try_oracledb._conn.expected_return
    )
|
505
tests/test_pgsql.py
Normal file
505
tests/test_pgsql.py
Normal file
|
@ -0,0 +1,505 @@
|
|||
# pylint: disable=redefined-outer-name,missing-function-docstring,protected-access
"""Tests on the PgDB PostgreSQL helper (mylib.pgsql)."""
# NOTE(review): the previous docstring said "opening hours helpers" — a
# copy-paste leftover from another test module.

import psycopg2
import pytest
from psycopg2.extras import RealDictCursor

from mylib.pgsql import PgDB
|
||||
|
||||
|
||||
class FakePsycopg2Cursor:
    """Fake Psycopg2 cursor

    Holds the expectations configured by a test and checks every
    execute() call against them.
    """

    def __init__(
        self, expected_sql, expected_params, expected_return, expected_just_try, expected_exception
    ):
        # Record everything the test expects so execute() can verify it later.
        self.expected_sql = expected_sql
        self.expected_params = expected_params
        self.expected_return = expected_return
        self.expected_just_try = expected_just_try
        self.expected_exception = expected_exception

    def execute(self, sql, params=None):
        """Check the statement against the recorded expectations.

        Raises psycopg2.Error when a failure was requested, fails the test
        when a non-SELECT statement runs in just-try mode, and otherwise
        asserts that both the SQL text and the bound parameters match.
        """
        if self.expected_exception:
            raise psycopg2.Error(f"{self}.execute({sql}, {params}): expected exception")
        is_select = sql.lower().startswith("select ")
        if self.expected_just_try and not is_select:
            assert False, f"{self}.execute({sql}, {params}) may not be executed in just try mode"
        # pylint: disable=consider-using-f-string
        sql_error = "%s.execute(): Invalid SQL query:\n '%s'\nMay be:\n '%s'" % (
            self,
            sql,
            self.expected_sql,
        )
        assert sql == self.expected_sql, sql_error
        # pylint: disable=consider-using-f-string
        params_error = "%s.execute(): Invalid params:\n %s\nMay be:\n %s" % (
            self,
            params,
            self.expected_params,
        )
        assert params == self.expected_params, params_error
        return self.expected_return

    def fetchall(self):
        """Return the rows configured as the expected query result."""
        return self.expected_return

    def __repr__(self):
        return (
            f"FakePsycopg2Cursor({self.expected_sql}, {self.expected_params}, "
            f"{self.expected_return}, {self.expected_just_try})"
        )
|
||||
|
||||
|
||||
class FakePsycopg2:
    """Fake Psycopg2 connection

    The class attributes below are per-test expectations: tests set them on
    the fake connection, and the fake cursor/connection methods check every
    call against them.
    """

    expected_sql = None
    expected_params = None
    expected_cursor_factory = None
    expected_return = True
    expected_just_try = False
    expected_exception = False
    just_try = False

    def __init__(self, **kwargs):
        # Accept only the connection kwargs PgDB.connect() is supposed to
        # pass, each with its expected type. Note: password may legitimately
        # be None, hence type(None) — the previous bare ``None`` in the tuple
        # made isinstance() raise TypeError instead of accepting it.
        allowed_kwargs = {
            "dbname": str,
            "user": str,
            "password": (str, type(None)),
            "host": str,
        }
        for arg, value in kwargs.items():
            assert arg in allowed_kwargs, f'Invalid arg {arg}="{value}"'
            assert isinstance(
                value, allowed_kwargs[arg]
            ), f"Arg {arg} not a {allowed_kwargs[arg]} ({type(value)})"
            setattr(self, arg, value)

    def close(self):
        """Pretend to close the connection."""
        return self.expected_return

    def set_client_encoding(self, *arg):
        """Check a single string encoding argument, optionally failing."""
        self._check_just_try()
        assert len(arg) == 1 and isinstance(arg[0], str)
        if self.expected_exception:
            raise psycopg2.Error(f"set_client_encoding({arg[0]}): Expected exception")
        return self.expected_return

    def cursor(self, cursor_factory=None):
        """Return a fake cursor preloaded with the test's expectations."""
        assert cursor_factory is self.expected_cursor_factory
        return FakePsycopg2Cursor(
            self.expected_sql,
            self.expected_params,
            self.expected_return,
            self.expected_just_try or self.just_try,
            self.expected_exception,
        )

    def commit(self):
        """Pretend to commit; forbidden in just-try mode."""
        self._check_just_try()
        return self.expected_return

    def rollback(self):
        """Pretend to rollback; forbidden in just-try mode."""
        self._check_just_try()
        return self.expected_return

    def _check_just_try(self):
        # Fail the test if a state-changing call happens in just-try mode.
        if self.just_try:
            assert False, "May not be executed in just try mode"
|
||||
|
||||
|
||||
def fake_psycopg2_connect(**kwargs):
    """Stand-in for psycopg2.connect() returning a fake connection."""
    return FakePsycopg2(**kwargs)
|
||||
|
||||
|
||||
def fake_psycopg2_connect_just_try(**kwargs):
    """Like fake_psycopg2_connect(), but with just-try mode enabled."""
    connection = FakePsycopg2(**kwargs)
    connection.just_try = True
    return connection
|
||||
|
||||
|
||||
@pytest.fixture
def test_pgdb():
    """A PgDB instance with dummy credentials (never connected)."""
    return PgDB("127.0.0.1", "user", "password", "dbname")


@pytest.fixture
def fake_pgdb(mocker):
    """A PgDB whose psycopg2.connect() is replaced by the fake."""
    mocker.patch("psycopg2.connect", fake_psycopg2_connect)
    return PgDB("127.0.0.1", "user", "password", "dbname")


@pytest.fixture
def fake_just_try_pgdb(mocker):
    """A just-try PgDB backed by the just-try fake connection."""
    mocker.patch("psycopg2.connect", fake_psycopg2_connect_just_try)
    return PgDB("127.0.0.1", "user", "password", "dbname", just_try=True)


@pytest.fixture
def fake_connected_pgdb(fake_pgdb):
    """fake_pgdb, already connected to the fake backend."""
    fake_pgdb.connect()
    return fake_pgdb


@pytest.fixture
def fake_connected_just_try_pgdb(fake_just_try_pgdb):
    """fake_just_try_pgdb, already connected to the fake backend."""
    fake_just_try_pgdb.connect()
    return fake_just_try_pgdb
|
||||
|
||||
|
||||
def generate_mock_args(expected_args=(), expected_kwargs=None, expected_return=True):
    """Build a mock callable that checks its call arguments.

    The returned function asserts it was called exactly with
    *expected_args* / *expected_kwargs*, then returns *expected_return*.

    Note: the previous signature used a mutable ``{}`` default; a ``None``
    sentinel avoids sharing one dict object across calls.
    """
    if expected_kwargs is None:
        expected_kwargs = {}

    def mock_args(*args, **kwargs):
        # pylint: disable=consider-using-f-string
        assert args == expected_args, "Invalid call args:\n %s\nMay be:\n %s" % (
            args,
            expected_args,
        )
        # pylint: disable=consider-using-f-string
        assert kwargs == expected_kwargs, "Invalid call kwargs:\n %s\nMay be:\n %s" % (
            kwargs,
            expected_kwargs,
        )
        return expected_return

    return mock_args
|
||||
|
||||
|
||||
def mock_doSQL_just_try(self, sql, params=None):  # pylint: disable=unused-argument
    """doSQL() replacement that fails the test if it is ever called.

    Used by just-try tests where no SQL may actually be executed.
    Raises AssertionError explicitly (instead of ``assert False``) so the
    guard also survives ``python -O``, which strips assert statements.
    """
    raise AssertionError("doSQL() may not be executed in just try mode")
|
||||
|
||||
|
||||
# Sentinel distinguishing "expected_params omitted" from an explicit None
# (callers do pass None to mean "doSQL() must receive params=None").
_PARAMS_UNSET = object()


def generate_mock_doSQL(expected_sql, expected_params=_PARAMS_UNSET, expected_return=True):
    """Build a doSQL()/doSelect() replacement checking the generated query.

    The returned method asserts that the SQL text and the bound parameters
    equal *expected_sql* / *expected_params*, then returns *expected_return*.
    When *expected_params* is omitted it defaults to an empty dict; the
    previous signature used a mutable ``{}`` default, which a sentinel avoids.
    """
    if expected_params is _PARAMS_UNSET:
        expected_params = {}

    def mock_doSQL(self, sql, params=None):  # pylint: disable=unused-argument
        # pylint: disable=consider-using-f-string
        assert sql == expected_sql, "Invalid generated SQL query:\n '%s'\nMay be:\n '%s'" % (
            sql,
            expected_sql,
        )
        # pylint: disable=consider-using-f-string
        assert params == expected_params, "Invalid generated params:\n %s\nMay be:\n %s" % (
            params,
            expected_params,
        )
        return expected_return

    return mock_doSQL
|
||||
|
||||
|
||||
# PgDB.doSelect() has the same expected parameters as PgDB.doSQL(), so the
# same mock generators are reused under doSelect-specific names.
generate_mock_doSelect = generate_mock_doSQL
mock_doSelect_just_try = mock_doSQL_just_try
|
||||
|
||||
#
|
||||
# Test on PgDB helper methods
|
||||
#
|
||||
|
||||
|
||||
def test_combine_params_with_to_add_parameter():
    """Two param dicts are merged into one."""
    assert PgDB._combine_params({"test1": 1}, {"test2": 2}) == {"test1": 1, "test2": 2}


def test_combine_params_with_kargs():
    """Keyword arguments are merged into the params."""
    assert PgDB._combine_params({"test1": 1}, test2=2) == {"test1": 1, "test2": 2}


def test_combine_params_with_kargs_and_to_add_parameter():
    """A params dict and keyword arguments can be combined at once."""
    assert PgDB._combine_params({"test1": 1}, {"test2": 2}, test3=3) == {
        "test1": 1,
        "test2": 2,
        "test3": 3,
    }
|
||||
|
||||
|
||||
def test_format_where_clauses_params_are_preserved():
    """A (sql, params) pair passes through unchanged."""
    args = ("test = test", {"test1": 1})
    assert PgDB._format_where_clauses(*args) == args


def test_format_where_clauses_raw():
    """A raw SQL string yields itself with empty params."""
    assert PgDB._format_where_clauses("test = test") == ("test = test", {})


def test_format_where_clauses_tuple_clause_with_params():
    """A (sql, params) tuple clause is preserved as-is."""
    where_clauses = ("test1 = %(test1)s AND test2 = %(test2)s", {"test1": 1, "test2": 2})
    assert PgDB._format_where_clauses(where_clauses) == where_clauses


def test_format_where_clauses_with_list_as_value():
    """A list value becomes an IN clause with one param per element."""
    assert PgDB._format_where_clauses({"test": [1, 2]}) == (
        '"test" IN (%(test_0)s, %(test_1)s)',
        {"test_0": 1, "test_1": 2},
    )


def test_format_where_clauses_dict():
    """A dict of field/value pairs becomes ANDed quoted equality clauses."""
    where_clauses = {"test1": 1, "test2": 2}
    assert PgDB._format_where_clauses(where_clauses) == (
        '"test1" = %(test1)s AND "test2" = %(test2)s',
        where_clauses,
    )


def test_format_where_clauses_combined_types():
    """Raw strings, (sql, params) tuples and dicts can be mixed in one call."""
    where_clauses = ("test1 = 1", ("test2 LIKE %(test2)s", {"test2": 2}), {"test3": 3, "test4": 4})
    assert PgDB._format_where_clauses(where_clauses) == (
        'test1 = 1 AND test2 LIKE %(test2)s AND "test3" = %(test3)s AND "test4" = %(test4)s',
        {"test2": 2, "test3": 3, "test4": 4},
    )


def test_format_where_clauses_with_where_op():
    """The where_op parameter selects the boolean operator between clauses."""
    where_clauses = {"test1": 1, "test2": 2}
    assert PgDB._format_where_clauses(where_clauses, where_op="OR") == (
        '"test1" = %(test1)s OR "test2" = %(test2)s',
        where_clauses,
    )
|
||||
|
||||
|
||||
def test_add_where_clauses():
    """A WHERE clause is appended to the SQL and its params returned."""
    sql = "SELECT * FROM table"
    where_clauses = {"test1": 1, "test2": 2}
    assert PgDB._add_where_clauses(sql, None, where_clauses) == (
        sql + ' WHERE "test1" = %(test1)s AND "test2" = %(test2)s',
        where_clauses,
    )


def test_add_where_clauses_preserved_params():
    """Pre-existing query params are merged with the WHERE params."""
    sql = "SELECT * FROM table"
    where_clauses = {"test1": 1, "test2": 2}
    params = {"fake1": 1}
    assert PgDB._add_where_clauses(sql, params.copy(), where_clauses) == (
        sql + ' WHERE "test1" = %(test1)s AND "test2" = %(test2)s',
        {**where_clauses, **params},
    )


def test_add_where_clauses_with_op():
    """Raw clauses are joined with the requested operator; no params added."""
    sql = "SELECT * FROM table"
    where_clauses = ("test1=1", "test2=2")
    assert PgDB._add_where_clauses(sql, None, where_clauses, where_op="OR") == (
        sql + " WHERE test1=1 OR test2=2",
        {},
    )


def test_add_where_clauses_with_duplicated_field():
    """A field present in both params and WHERE gets a de-duplicated name."""
    sql = "UPDATE table SET test1=%(test1)s"
    params = {"test1": "new_value"}
    where_clauses = {"test1": "where_value"}
    assert PgDB._add_where_clauses(sql, params, where_clauses) == (
        sql + ' WHERE "test1" = %(test1_1)s',
        {"test1": "new_value", "test1_1": "where_value"},
    )


def test_quote_table_name():
    """Table names, optionally schema-qualified, are double-quoted."""
    assert PgDB._quote_table_name("mytable") == '"mytable"'
    assert PgDB._quote_table_name("myschema.mytable") == '"myschema"."mytable"'
|
||||
|
||||
|
||||
def test_insert(mocker, test_pgdb):
    """insert() generates the expected INSERT statement and parameters."""
    values = {"test1": 1, "test2": 2}
    mocker.patch(
        "mylib.pgsql.PgDB.doSQL",
        generate_mock_doSQL(
            'INSERT INTO "mytable" ("test1", "test2") VALUES (%(test1)s, %(test2)s)', values
        ),
    )

    assert test_pgdb.insert("mytable", values)


def test_insert_just_try(mocker, test_pgdb):
    """insert(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.pgsql.PgDB.doSQL", mock_doSQL_just_try)
    assert test_pgdb.insert("mytable", {"test1": 1, "test2": 2}, just_try=True)


def test_update(mocker, test_pgdb):
    """update() generates the expected UPDATE statement and parameters."""
    values = {"test1": 1, "test2": 2}
    where_clauses = {"test3": 3, "test4": 4}
    mocker.patch(
        "mylib.pgsql.PgDB.doSQL",
        generate_mock_doSQL(
            'UPDATE "mytable" SET "test1" = %(test1)s, "test2" = %(test2)s WHERE "test3" ='
            ' %(test3)s AND "test4" = %(test4)s',
            {**values, **where_clauses},
        ),
    )

    assert test_pgdb.update("mytable", values, where_clauses)


def test_update_just_try(mocker, test_pgdb):
    """update(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.pgsql.PgDB.doSQL", mock_doSQL_just_try)
    assert test_pgdb.update("mytable", {"test1": 1, "test2": 2}, None, just_try=True)


def test_delete(mocker, test_pgdb):
    """delete() generates the expected DELETE statement and parameters."""
    where_clauses = {"test1": 1, "test2": 2}
    mocker.patch(
        "mylib.pgsql.PgDB.doSQL",
        generate_mock_doSQL(
            'DELETE FROM "mytable" WHERE "test1" = %(test1)s AND "test2" = %(test2)s', where_clauses
        ),
    )

    assert test_pgdb.delete("mytable", where_clauses)


def test_delete_just_try(mocker, test_pgdb):
    """delete(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.pgsql.PgDB.doSQL", mock_doSQL_just_try)
    assert test_pgdb.delete("mytable", None, just_try=True)


def test_truncate(mocker, test_pgdb):
    """truncate() generates the expected TRUNCATE statement."""
    mocker.patch("mylib.pgsql.PgDB.doSQL", generate_mock_doSQL('TRUNCATE TABLE "mytable"', None))

    assert test_pgdb.truncate("mytable")


def test_truncate_just_try(mocker, test_pgdb):
    """truncate(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.pgsql.PgDB.doSQL", mock_doSelect_just_try)
    assert test_pgdb.truncate("mytable", just_try=True)
|
||||
|
||||
|
||||
def test_select(mocker, test_pgdb):
    """select() generates the expected SELECT with fields, WHERE, ORDER BY and LIMIT."""
    fields = ("field1", "field2")
    where_clauses = {"test3": 3, "test4": 4}
    expected_return = [
        {"field1": 1, "field2": 2},
        {"field1": 2, "field2": 3},
    ]
    order_by = "field1, DESC"
    limit = 10
    mocker.patch(
        "mylib.pgsql.PgDB.doSelect",
        generate_mock_doSQL(
            'SELECT "field1", "field2" FROM "mytable" WHERE "test3" = %(test3)s AND "test4" ='
            " %(test4)s ORDER BY " + order_by + " LIMIT " + str(limit),  # nosec: B608
            where_clauses,
            expected_return,
        ),
    )

    assert (
        test_pgdb.select("mytable", where_clauses, fields, order_by=order_by, limit=limit)
        == expected_return
    )


def test_select_without_field_and_order_by(mocker, test_pgdb):
    """select() with defaults generates a plain SELECT *."""
    mocker.patch("mylib.pgsql.PgDB.doSelect", generate_mock_doSQL('SELECT * FROM "mytable"'))

    assert test_pgdb.select("mytable")


def test_select_just_try(mocker, test_pgdb):
    """select(just_try=True) must not reach doSQL()."""
    mocker.patch("mylib.pgsql.PgDB.doSQL", mock_doSelect_just_try)
    assert test_pgdb.select("mytable", None, None, just_try=True)
|
||||
|
||||
|
||||
#
|
||||
# Tests on main methods
|
||||
#
|
||||
|
||||
|
||||
def test_connect(mocker, test_pgdb):
    """connect() passes dbname/user/host/password to psycopg2.connect()."""
    expected_kwargs = {
        "dbname": test_pgdb._db,
        "user": test_pgdb._user,
        "host": test_pgdb._host,
        "password": test_pgdb._pwd,
    }

    mocker.patch("psycopg2.connect", generate_mock_args(expected_kwargs=expected_kwargs))

    assert test_pgdb.connect()


def test_close(fake_pgdb):
    """close() on a non-connected object returns None."""
    assert fake_pgdb.close() is None


def test_close_connected(fake_connected_pgdb):
    """close() on a connected object returns None."""
    assert fake_connected_pgdb.close() is None


def test_setEncoding(fake_connected_pgdb):
    """setEncoding() succeeds on a connected object."""
    assert fake_connected_pgdb.setEncoding("utf8")


def test_setEncoding_not_connected(fake_pgdb):
    """setEncoding() returns False when not connected."""
    assert fake_pgdb.setEncoding("utf8") is False


def test_setEncoding_on_exception(fake_connected_pgdb):
    """setEncoding() returns False when the driver raises."""
    fake_connected_pgdb._conn.expected_exception = True
    assert fake_connected_pgdb.setEncoding("utf8") is False
|
||||
|
||||
|
||||
def test_doSQL(fake_connected_pgdb):
    """doSQL() executes the statement with its bound parameters."""
    fake_connected_pgdb._conn.expected_sql = "DELETE FROM table WHERE test1 = %(test1)s"
    fake_connected_pgdb._conn.expected_params = {"test1": 1}
    fake_connected_pgdb.doSQL(
        fake_connected_pgdb._conn.expected_sql, fake_connected_pgdb._conn.expected_params
    )


def test_doSQL_without_params(fake_connected_pgdb):
    """doSQL() works without bound parameters."""
    fake_connected_pgdb._conn.expected_sql = "DELETE FROM table"
    fake_connected_pgdb.doSQL(fake_connected_pgdb._conn.expected_sql)


def test_doSQL_just_try(fake_connected_just_try_pgdb):
    """In just-try mode doSQL() reports success without executing anything."""
    assert fake_connected_just_try_pgdb.doSQL("DELETE FROM table")


def test_doSQL_on_exception(fake_connected_pgdb):
    """doSQL() returns False when the underlying driver raises."""
    fake_connected_pgdb._conn.expected_exception = True
    assert fake_connected_pgdb.doSQL("DELETE FROM table") is False
|
||||
|
||||
|
||||
def test_doSelect(fake_connected_pgdb):
    """doSelect() uses a RealDictCursor and returns the fetched rows."""
    fake_connected_pgdb._conn.expected_sql = "SELECT * FROM table WHERE test1 = %(test1)s"
    fake_connected_pgdb._conn.expected_params = {"test1": 1}
    fake_connected_pgdb._conn.expected_cursor_factory = RealDictCursor
    fake_connected_pgdb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_pgdb.doSelect(
            fake_connected_pgdb._conn.expected_sql, fake_connected_pgdb._conn.expected_params
        )
        == fake_connected_pgdb._conn.expected_return
    )


def test_doSelect_without_params(fake_connected_pgdb):
    """doSelect() works without bound parameters."""
    fake_connected_pgdb._conn.expected_sql = "SELECT * FROM table"
    fake_connected_pgdb._conn.expected_cursor_factory = RealDictCursor
    fake_connected_pgdb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_pgdb.doSelect(fake_connected_pgdb._conn.expected_sql)
        == fake_connected_pgdb._conn.expected_return
    )


def test_doSelect_on_exception(fake_connected_pgdb):
    """doSelect() returns False when the underlying driver raises."""
    fake_connected_pgdb._conn.expected_cursor_factory = RealDictCursor
    fake_connected_pgdb._conn.expected_exception = True
    assert fake_connected_pgdb.doSelect("SELECT * FROM table") is False


def test_doSelect_just_try(fake_connected_just_try_pgdb):
    """SELECT statements are still executed in just-try mode."""
    fake_connected_just_try_pgdb._conn.expected_sql = "SELECT * FROM table WHERE test1 = %(test1)s"
    fake_connected_just_try_pgdb._conn.expected_params = {"test1": 1}
    fake_connected_just_try_pgdb._conn.expected_cursor_factory = RealDictCursor
    fake_connected_just_try_pgdb._conn.expected_return = [{"test1": 1}]
    assert (
        fake_connected_just_try_pgdb.doSelect(
            fake_connected_just_try_pgdb._conn.expected_sql,
            fake_connected_just_try_pgdb._conn.expected_params,
        )
        == fake_connected_just_try_pgdb._conn.expected_return
    )
|
39
tests/test_telltale.py
Normal file
39
tests/test_telltale.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
# pylint: disable=missing-function-docstring
"""Tests on the TelltaleFile helper (mylib.telltale)."""
# NOTE(review): the previous docstring said "opening hours helpers" — a
# copy-paste leftover from another test module.

import datetime
import os

import pytest

from mylib.telltale import TelltaleFile
|
||||
|
||||
|
||||
def test_create_telltale_file(tmp_path):
    """A telltale file is created on update() and tracks its last update time."""
    filename = "test"
    file = TelltaleFile(filename=filename, dirpath=tmp_path)
    assert file.filename == filename
    assert file.dirpath == tmp_path
    assert file.filepath == os.path.join(tmp_path, filename)

    # Nothing exists on disk until update() is called.
    assert not os.path.exists(file.filepath)
    assert file.last_update is None
    file.update()
    assert os.path.exists(file.filepath)
    assert isinstance(file.last_update, datetime.datetime)


def test_create_telltale_file_with_filepath_and_invalid_dirpath():
    """filepath and dirpath must agree; otherwise construction fails."""
    with pytest.raises(AssertionError):
        TelltaleFile(filepath="/tmp/test", dirpath="/var/tmp")  # nosec: B108


def test_create_telltale_file_with_filepath_and_invalid_filename():
    """filepath and filename must agree; otherwise construction fails."""
    with pytest.raises(AssertionError):
        TelltaleFile(filepath="/tmp/test", filename="other")  # nosec: B108


def test_remove_telltale_file(tmp_path):
    """remove() deletes a previously created telltale file."""
    file = TelltaleFile(filename="test", dirpath=tmp_path)
    file.update()
    assert file.remove()
|
|
@ -1,53 +0,0 @@
|
|||
"""Manual demo/check script for the opening_hours helpers.

Previously written with Python 2 ``print`` statements (a SyntaxError on
Python 3); modernized to Python 3 with identical output text.
"""

import datetime
import logging
import os
import sys

# Make the project root importable when running this script directly.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import opening_hours  # noqa: E402  (needs the sys.path tweak above)

debug = True

logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)

exceptional_closures = [
    "22/09/2017",
    "20/09/2017-22/09/2017",
    "20/09/2017-22/09/2017 18/09/2017",
    "25/11/2017",
    "26/11/2017 9h30-12h30",
]
print("Raw exceptional closures value : %s" % exceptional_closures)
print(
    "Parsed exceptional closures : %s"
    % opening_hours.parse_exceptional_closures(exceptional_closures)
)

normal_opening_hours = [
    "lundi-mardi jeudi 9h30-12h30 14h-16h30",
    "mercredi vendredi 9h30-12h30 14h-17h",
]
print("Raw normal opening hours : %s" % normal_opening_hours)
print(
    "Parsed normal opening hours : %s"
    % opening_hours.parse_normal_opening_hours(normal_opening_hours)
)

nonworking_public_holidays = [
    "1janvier",
    "paques",
    "lundi_paques",
    "1mai",
    "8mai",
    "jeudi_ascension",
    "lundi_pentecote",
    "14juillet",
    "15aout",
    "1novembre",
    "11novembre",
    "noel",
]
print("Raw nonworking_public_holidays values : %s" % nonworking_public_holidays)

print(
    "Is closed (now) : %s"
    % opening_hours.is_closed(
        normal_opening_hours, exceptional_closures, nonworking_public_holidays
    )
)

# Each case pairs a datetime with the expected is_closed() result.
tests = [
    {
        "date_time": datetime.datetime(2017, 5, 1, 20, 15),
        "result": {"exceptional_closure": False, "closed": True, "exceptional_closure_all_day": False},
    },
    {
        "date_time": datetime.datetime(2017, 5, 2, 15, 15),
        "result": {"exceptional_closure": False, "closed": False, "exceptional_closure_all_day": False},
    },
    {
        "date_time": datetime.datetime(2017, 12, 25, 20, 15),
        "result": {"exceptional_closure": False, "closed": True, "exceptional_closure_all_day": False},
    },
    {
        "date_time": datetime.datetime(2017, 9, 22, 15, 15),
        "result": {"exceptional_closure": True, "closed": True, "exceptional_closure_all_day": True},
    },
    {
        "date_time": datetime.datetime(2017, 11, 25, 15, 15),
        "result": {"exceptional_closure": True, "closed": True, "exceptional_closure_all_day": True},
    },
    {
        "date_time": datetime.datetime(2017, 11, 26, 11, 15),
        "result": {"exceptional_closure": True, "closed": True, "exceptional_closure_all_day": False},
    },
]
for test in tests:
    result = opening_hours.is_closed(
        normal_opening_hours, exceptional_closures, nonworking_public_holidays, test["date_time"]
    )
    status = "OK" if result == test["result"] else "ERROR"
    print("Is closed (%s) : %s => %s" % (test["date_time"].isoformat(), result, status))
|
Loading…
Reference in a new issue