{% endcall %}
{% endblock %}
diff --git a/towncrier/newsfragments/224.feature b/towncrier/newsfragments/224.feature
new file mode 100644
index 00000000..9a2f479b
--- /dev/null
+++ b/towncrier/newsfragments/224.feature
@@ -0,0 +1 @@
+Add instructions on how to create DNS records for email client auto-configuration (RFC6186 style)
From 80f939cf1ac3fe9863825de1360ee7c5d67926f6 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 8 Feb 2021 10:16:03 +0100
Subject: [PATCH 050/181] Revert to the old behaviour when ADMIN=false
---
core/nginx/conf/nginx.conf | 10 +++++++---
webmails/rainloop/application.ini | 4 +++-
webmails/roundcube/config.inc.php | 10 ++++++----
3 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/core/nginx/conf/nginx.conf b/core/nginx/conf/nginx.conf
index a7a9e134..81f1ac0d 100644
--- a/core/nginx/conf/nginx.conf
+++ b/core/nginx/conf/nginx.conf
@@ -133,10 +133,12 @@ http {
{% endif %}
include /etc/nginx/proxy.conf;
client_max_body_size {{ MESSAGE_SIZE_LIMIT|int + 8388608 }};
- auth_request /internal/auth/user;
proxy_pass http://$webmail;
+ {% if ADMIN == 'true' %}
+ auth_request /internal/auth/user;
error_page 403 @webmail_login;
}
+
location {{ WEB_WEBMAIL }}/sso.php {
{% if WEB_WEBMAIL != '/' %}
rewrite ^({{ WEB_WEBMAIL }})$ $1/ permanent;
@@ -152,11 +154,13 @@ http {
proxy_pass http://$webmail;
error_page 403 @webmail_login;
}
+
location @webmail_login {
return 302 {{ WEB_ADMIN }}/ui/login?next=ui.webmail;
}
- {% endif %}
-
+ {% else %}
+ }
+ {% endif %}{% endif %}
{% if ADMIN == 'true' %}
location {{ WEB_ADMIN }} {
return 301 {{ WEB_ADMIN }}/ui;
diff --git a/webmails/rainloop/application.ini b/webmails/rainloop/application.ini
index bc953af1..0504f174 100644
--- a/webmails/rainloop/application.ini
+++ b/webmails/rainloop/application.ini
@@ -7,9 +7,11 @@ attachment_size_limit = {{ MAX_FILESIZE }}
allow_admin_panel = Off
[labs]
+allow_gravatar = Off
+{% if ADMIN == "true" %}
custom_login_link='sso.php'
custom_logout_link='{{ WEB_ADMIN }}/ui/logout'
-allow_gravatar = Off
+{% endif %}
[contacts]
enable = On
diff --git a/webmails/roundcube/config.inc.php b/webmails/roundcube/config.inc.php
index bb1a5e84..d8028db3 100644
--- a/webmails/roundcube/config.inc.php
+++ b/webmails/roundcube/config.inc.php
@@ -17,8 +17,7 @@ $config['plugins'] = array(
'markasjunk',
'managesieve',
'enigma',
- 'carddav',
- 'mailu'
+ 'carddav'
);
$front = getenv('FRONT_ADDRESS') ? getenv('FRONT_ADDRESS') : 'front';
@@ -37,8 +36,11 @@ $config['managesieve_host'] = $imap;
$config['managesieve_usetls'] = false;
// Customization settings
-$config['support_url'] = getenv('WEB_ADMIN') ? '../..' . getenv('WEB_ADMIN') : '';
-$config['sso_logout_url'] = getenv('WEB_ADMIN').'/ui/logout';
+if (filter_var(getenv('ADMIN'), FILTER_VALIDATE_BOOLEAN, FILTER_NULL_ON_FAILURE)) {
+ array_push($config['plugins'], 'mailu');
+ $config['support_url'] = getenv('WEB_ADMIN') ? '../..' . getenv('WEB_ADMIN') : '';
+ $config['sso_logout_url'] = getenv('WEB_ADMIN').'/ui/logout';
+}
$config['product_name'] = 'Mailu Webmail';
// We access the IMAP and SMTP servers locally with internal names, SSL
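Aside (not part of the patch series): a minimal sketch of how the reintroduced ``{% if ADMIN == 'true' %}`` guard in core/nginx/conf/nginx.conf renders, using Jinja2 on a heavily simplified excerpt of the template. Paths and variable values are illustrative only.

.. code-block:: python

    # Illustrative only: render a simplified version of the webmail location
    # block for ADMIN=true and ADMIN=false to show the reverted behaviour.
    from jinja2 import Template

    SNIPPET = """\
    location {{ WEB_WEBMAIL }} {
        proxy_pass http://$webmail;
        {% if ADMIN == 'true' %}
        auth_request /internal/auth/user;
        error_page 403 @webmail_login;
    }
    location @webmail_login {
        return 302 {{ WEB_ADMIN }}/ui/login?next=ui.webmail;
    }
        {% else %}
    }
        {% endif %}
    """

    for admin in ('true', 'false'):
        print(f'--- ADMIN={admin} ---')
        print(Template(SNIPPET).render(ADMIN=admin, WEB_WEBMAIL='/webmail', WEB_ADMIN='/admin'))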
From 0917a6817f845c01a2247d74f111abc2ef54e51a Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 8 Feb 2021 10:17:43 +0100
Subject: [PATCH 051/181] Set ADMIN=false to ensure that the tests pass
---
tests/compose/rainloop/mailu.env | 2 +-
tests/compose/roundcube/mailu.env | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/compose/rainloop/mailu.env b/tests/compose/rainloop/mailu.env
index 9c31c8bb..47b0b934 100644
--- a/tests/compose/rainloop/mailu.env
+++ b/tests/compose/rainloop/mailu.env
@@ -51,7 +51,7 @@ DISABLE_STATISTICS=False
###################################
# Expose the admin interface (value: true, false)
-ADMIN=true
+ADMIN=false
# Choose which webmail to run if any (values: roundcube, rainloop, none)
WEBMAIL=rainloop
diff --git a/tests/compose/roundcube/mailu.env b/tests/compose/roundcube/mailu.env
index dc503268..887cebbd 100644
--- a/tests/compose/roundcube/mailu.env
+++ b/tests/compose/roundcube/mailu.env
@@ -51,7 +51,7 @@ DISABLE_STATISTICS=False
###################################
# Expose the admin interface (value: true, false)
-ADMIN=true
+ADMIN=false
# Choose which webmail to run if any (values: roundcube, rainloop, none)
WEBMAIL=roundcube
From e8f70c12dcdce1064ea063cc5ddcae28a5221613 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 8 Feb 2021 10:22:25 +0100
Subject: [PATCH 052/181] avoid a warning
---
webmails/roundcube/start.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py
index 9ce383c8..45b2aa76 100755
--- a/webmails/roundcube/start.py
+++ b/webmails/roundcube/start.py
@@ -40,7 +40,7 @@ os.system("mkdir -p /data/gpg /var/www/html/logs")
os.system("touch /var/www/html/logs/errors.log")
os.system("chown -R www-data:www-data /var/www/html/logs")
os.system("chmod -R a+rX /var/www/html/")
-os.system("ln -s /var/www/html/index.php /var/www/html/sso.php")
+os.system("ln -sf /var/www/html/index.php /var/www/html/sso.php")
try:
print("Initializing database")
From 6c4fa5432f3f040cc16fca870f0382939ae2817b Mon Sep 17 00:00:00 2001
From: Nils Vogels
Date: Thu, 11 Feb 2021 12:03:07 +0100
Subject: [PATCH 053/181] Provide fix in postgresql container for
CVE-2021-23240, CVE-2021-3156, CVE-2021-23239
---
optional/postgresql/Dockerfile | 1 +
1 file changed, 1 insertion(+)
diff --git a/optional/postgresql/Dockerfile b/optional/postgresql/Dockerfile
index 95048147..ff25a66f 100644
--- a/optional/postgresql/Dockerfile
+++ b/optional/postgresql/Dockerfile
@@ -3,6 +3,7 @@ FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip bash py3-multidict \
+ && apk add --upgrade sudo \
&& pip3 install --upgrade pip
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, rainloop, roundcube
From 3b7ecb3a8b4c466ad2f37276107bb49efe978758 Mon Sep 17 00:00:00 2001
From: Nils Vogels
Date: Thu, 11 Feb 2021 12:12:06 +0100
Subject: [PATCH 054/181] Add changelog
---
towncrier/newsfragments/1760.bugfix | 2 ++
1 file changed, 2 insertions(+)
create mode 100644 towncrier/newsfragments/1760.bugfix
diff --git a/towncrier/newsfragments/1760.bugfix b/towncrier/newsfragments/1760.bugfix
new file mode 100644
index 00000000..9d6f38af
--- /dev/null
+++ b/towncrier/newsfragments/1760.bugfix
@@ -0,0 +1,2 @@
+Fix CVE-2021-23240, CVE-2021-3156 and CVE-2021-23239 for postgresql
+by force-upgrading sudo.
From 68caf501549d08c048a7c232ac72bb8a072dd9e6 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 15 Feb 2021 00:46:59 +0100
Subject: [PATCH 055/181] new import/export using marshmallow
---
core/admin/mailu/manage.py | 218 +++++----
core/admin/mailu/models.py | 132 +++---
core/admin/mailu/schemas.py | 608 +++++++++++++++++++++-----
docs/cli.rst | 221 ++++++----
tests/compose/core/02_forward_test.sh | 4 +-
tests/compose/core/03_alias_test.sh | 4 +-
tests/compose/core/04_reply_test.sh | 4 +-
towncrier/newsfragments/1604.feature | 2 +-
8 files changed, 851 insertions(+), 342 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index e02d9ad4..a8d1d3cb 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -4,7 +4,6 @@
import sys
import os
import socket
-import json
import logging
import uuid
@@ -20,7 +19,7 @@ from flask.cli import FlaskGroup, with_appcontext
from marshmallow.exceptions import ValidationError
from . import models
-from .schemas import MailuSchema, get_schema
+from .schemas import MailuSchema, get_schema, get_fieldspec, colorize, RenderJSON, HIDDEN
db = models.db
@@ -182,7 +181,7 @@ def user_import(localpart, domain_name, password_hash, hash_scheme = None):
db.session.commit()
-# TODO: remove this deprecated function
+# TODO: remove deprecated config_update function?
@mailu.command()
@click.option('-v', '--verbose')
@click.option('-d', '--delete-objects')
@@ -324,17 +323,16 @@ def config_update(verbose=False, delete_objects=False):
db.session.commit()
-SECTIONS = {'domains', 'relays', 'users', 'aliases'}
-
-
@mailu.command()
-@click.option('-v', '--verbose', count=True, help='Increase verbosity')
-@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors')
-@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config')
-@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made')
+@click.option('-v', '--verbose', count=True, help='Increase verbosity.')
+@click.option('-s', '--secrets', is_flag=True, help='Show secret attributes in messages.')
+@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors.')
+@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
+@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config.')
+@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made.')
@click.argument('source', metavar='[FILENAME|-]', type=click.File(mode='r'), default=sys.stdin)
@with_appcontext
-def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
+def config_import(verbose=0, secrets=False, quiet=False, color=False, update=False, dry_run=False, source=None):
""" Import configuration as YAML or JSON from stdin or file
"""
@@ -344,12 +342,19 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
# 2 : also show secrets
# 3 : also show input data
# 4 : also show sql queries
+ # 5 : also show tracebacks
if quiet:
verbose = -1
+ color_cfg = {
+ 'color': color or sys.stdout.isatty(),
+ 'lexer': 'python',
+ 'strip': True,
+ }
+
counter = Counter()
- dumper = {}
+ logger = {}
def format_errors(store, path=None):
@@ -387,19 +392,26 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
last = action
changes.append(f'{what}({count})')
else:
- changes = 'no changes.'
+ changes = ['No changes.']
return chain(message, changes)
def log(action, target, message=None):
if message is None:
- message = json.dumps(dumper[target.__class__].dump(target), ensure_ascii=False)
- print(f'{action} {target.__table__}: {message}')
+ # TODO: convert nested OrderedDict to dict
+ # see: flask mailu config-import -nvv yaml/dump4.yaml
+ try:
+ message = dict(logger[target.__class__].dump(target))
+ except KeyError:
+ message = target
+ if not isinstance(message, str):
+ message = repr(message)
+ print(f'{action} {target.__table__}: {colorize(message, **color_cfg)}')
def listen_insert(mapper, connection, target): # pylint: disable=unused-argument
""" callback function to track import """
- counter.update([('Added', target.__table__.name)])
+ counter.update([('Created', target.__table__.name)])
if verbose >= 1:
- log('Added', target)
+ log('Created', target)
def listen_update(mapper, connection, target): # pylint: disable=unused-argument
""" callback function to track import """
@@ -407,32 +419,32 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
changed = {}
inspection = sqlalchemy.inspect(target)
for attr in sqlalchemy.orm.class_mapper(target.__class__).column_attrs:
- if getattr(inspection.attrs, attr.key).history.has_changes():
- if sqlalchemy.orm.attributes.get_history(target, attr.key)[2]:
- before = sqlalchemy.orm.attributes.get_history(target, attr.key)[2].pop()
- after = getattr(target, attr.key)
- # only remember changed keys
- if before != after and (before or after):
- if verbose >= 1:
- changed[str(attr.key)] = (before, after)
- else:
- break
+ history = getattr(inspection.attrs, attr.key).history
+ if history.has_changes() and history.deleted:
+ before = history.deleted[-1]
+ after = getattr(target, attr.key)
+ # TODO: remove special handling of "comment" after modifying model
+ if attr.key == 'comment' and not before and not after:
+ pass
+ # only remember changed keys
+ elif before != after:
+ if verbose >= 1:
+ changed[str(attr.key)] = (before, after)
+ else:
+ break
if verbose >= 1:
# use schema with dump_context to hide secrets and sort keys
- primary = json.dumps(str(target), ensure_ascii=False)
- dumped = get_schema(target)(only=changed.keys(), context=dump_context).dump(target)
+ dumped = get_schema(target)(only=changed.keys(), context=diff_context).dump(target)
for key, value in dumped.items():
before, after = changed[key]
- if value == '':
- before = '' if before else before
- after = '' if after else after
+ if value == HIDDEN:
+ before = HIDDEN if before else before
+ after = HIDDEN if after else after
else:
- # TODO: use schema to "convert" before value?
+ # TODO: need to use schema to "convert" before value?
after = value
- before = json.dumps(before, ensure_ascii=False)
- after = json.dumps(after, ensure_ascii=False)
- log('Modified', target, f'{primary} {key}: {before} -> {after}')
+ log('Modified', target, f'{str(target)!r} {key}: {before!r} -> {after!r}')
if changed:
counter.update([('Modified', target.__table__.name)])
@@ -443,47 +455,60 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
if verbose >= 1:
log('Deleted', target)
- # this listener should not be necessary, when:
- # dkim keys should be stored in database and it should be possible to store multiple
- # keys per domain. the active key would be also stored on disk on commit.
+ # TODO: this listener will not be necessary, if dkim keys would be stored in database
+ _dedupe_dkim = set()
def listen_dkim(session, flush_context): # pylint: disable=unused-argument
""" callback function to track import """
for target in session.identity_map.values():
- if not isinstance(target, models.Domain):
+ # look at Domains originally loaded from db
+ if not isinstance(target, models.Domain) or not target._sa_instance_state.load_path:
continue
- primary = json.dumps(str(target), ensure_ascii=False)
before = target._dkim_key_on_disk
after = target._dkim_key
- if before != after and (before or after):
- if verbose >= 2:
+ if before != after:
+ if secrets:
before = before.decode('ascii', 'ignore')
after = after.decode('ascii', 'ignore')
else:
- before = '' if before else ''
- after = '' if after else ''
- before = json.dumps(before, ensure_ascii=False)
- after = json.dumps(after, ensure_ascii=False)
- log('Modified', target, f'{primary} dkim_key: {before} -> {after}')
- counter.update([('Modified', target.__table__.name)])
+ before = HIDDEN if before else ''
+ after = HIDDEN if after else ''
+ # "de-dupe" messages; this event is fired at every flush
+ if not (target, before, after) in _dedupe_dkim:
+ _dedupe_dkim.add((target, before, after))
+ counter.update([('Modified', target.__table__.name)])
+ if verbose >= 1:
+ log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
- def track_serialize(self, item):
+ def track_serialize(obj, item):
""" callback function to track import """
- log('Handling', self.opts.model, item)
+ # hide secrets
+ data = logger[obj.opts.model].hide(item)
+ if 'hash_password' in data:
+ data['password'] = HIDDEN
+ if 'fetches' in data:
+ for fetch in data['fetches']:
+ fetch['password'] = HIDDEN
+ log('Handling', obj.opts.model, data)
# configure contexts
- dump_context = {
- 'secrets': verbose >= 2,
+ diff_context = {
+ 'full': True,
+ 'secrets': secrets,
+ }
+ log_context = {
+ 'secrets': secrets,
}
load_context = {
- 'callback': track_serialize if verbose >= 3 else None,
- 'clear': not update,
'import': True,
+ 'update': update,
+ 'clear': not update,
+ 'callback': track_serialize if verbose >= 2 else None,
}
# register listeners
for schema in get_schema():
model = schema.Meta.model
- dumper[model] = schema(context=dump_context)
+ logger[model] = schema(context=log_context)
sqlalchemy.event.listen(model, 'after_insert', listen_insert)
sqlalchemy.event.listen(model, 'after_update', listen_update)
sqlalchemy.event.listen(model, 'after_delete', listen_delete)
@@ -491,18 +516,24 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
# special listener for dkim_key changes
sqlalchemy.event.listen(db.session, 'after_flush', listen_dkim)
- if verbose >= 4:
+ if verbose >= 3:
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
try:
with models.db.session.no_autoflush:
- config = MailuSchema(only=SECTIONS, context=load_context).loads(source)
+ config = MailuSchema(only=MailuSchema.Meta.order, context=load_context).loads(source)
except ValidationError as exc:
raise click.ClickException(format_errors(exc.messages)) from exc
except Exception as exc:
- # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
- raise click.ClickException(f'[{exc.__class__.__name__}] {" ".join(str(exc).split())}') from exc
+ if verbose >= 5:
+ raise
+ else:
+ # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
+ raise click.ClickException(
+ f'[{exc.__class__.__name__}] '
+ f'{" ".join(str(exc).split())}'
+ ) from exc
# flush session to show/count all changes
if dry_run or verbose >= 1:
@@ -510,53 +541,47 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=No
# check for duplicate domain names
dup = set()
- for fqdn in chain(db.session.query(models.Domain.name),
- db.session.query(models.Alternative.name),
- db.session.query(models.Relay.name)):
+ for fqdn in chain(
+ db.session.query(models.Domain.name),
+ db.session.query(models.Alternative.name),
+ db.session.query(models.Relay.name)
+ ):
if fqdn in dup:
raise click.ClickException(f'[ValidationError] Duplicate domain name: {fqdn}')
dup.add(fqdn)
- # TODO: implement special update "items"
- # -pkey: which - remove item "which"
- # -key: null or [] or {} - set key to default
- # -pkey: null or [] or {} - remove all existing items in this list
-
# don't commit when running dry
if dry_run:
- db.session.rollback()
if not quiet:
print(*format_changes('Dry run. Not commiting changes.'))
- # TODO: remove debug
- print(MailuSchema().dumps(config))
+ db.session.rollback()
else:
- db.session.commit()
if not quiet:
- print(*format_changes('Commited changes.'))
+ print(*format_changes('Committing changes.'))
+ db.session.commit()
@mailu.command()
-@click.option('-f', '--full', is_flag=True, help='Include attributes with default value')
+@click.option('-f', '--full', is_flag=True, help='Include attributes with default value.')
@click.option('-s', '--secrets', is_flag=True,
- help='Include secret attributes (dkim-key, passwords)')
-@click.option('-d', '--dns', is_flag=True, help='Include dns records')
+ help='Include secret attributes (dkim-key, passwords).')
+@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
+@click.option('-d', '--dns', is_flag=True, help='Include dns records.')
@click.option('-o', '--output-file', 'output', default=sys.stdout, type=click.File(mode='w'),
- help='save yaml to file')
-@click.option('-j', '--json', 'as_json', is_flag=True, help='Dump in josn format')
-@click.argument('sections', nargs=-1)
+ help='Save configuration to file.')
+@click.option('-j', '--json', 'as_json', is_flag=True, help='Export configuration in json format.')
+@click.argument('only', metavar='[FILTER]...', nargs=-1)
@with_appcontext
-def config_export(full=False, secrets=False, dns=False, output=None, as_json=False, sections=None):
+def config_export(full=False, secrets=False, color=False, dns=False, output=None, as_json=False, only=None):
""" Export configuration as YAML or JSON to stdout or file
"""
- if sections:
- for section in sections:
- if section not in SECTIONS:
- print(f'[ERROR] Unknown section: {section}')
- raise click.exceptions.Exit(1)
- sections = set(sections)
+ if only:
+ for spec in only:
+ if spec.split('.', 1)[0] not in MailuSchema.Meta.order:
+ raise click.ClickException(f'[ERROR] Unknown section: {spec}')
else:
- sections = SECTIONS
+ only = MailuSchema.Meta.order
context = {
'full': full,
@@ -564,13 +589,20 @@ def config_export(full=False, secrets=False, dns=False, output=None, as_json=Fal
'dns': dns,
}
- if as_json:
- schema = MailuSchema(only=sections, context=context)
- schema.opts.render_module = json
- print(schema.dumps(models.MailuConfig(), separators=(',',':')), file=output)
+ schema = MailuSchema(only=only, context=context)
+ color_cfg = {'color': color or output.isatty()}
- else:
- MailuSchema(only=sections, context=context).dumps(models.MailuConfig(), output)
+ if as_json:
+ schema.opts.render_module = RenderJSON
+ color_cfg['lexer'] = 'json'
+ color_cfg['strip'] = True
+
+ try:
+ print(colorize(schema.dumps(models.MailuConfig()), **color_cfg), file=output)
+ except ValueError as exc:
+ if spec := get_fieldspec(exc):
+ raise click.ClickException(f'[ERROR] Invalid filter: {spec}') from exc
+ raise
@mailu.command()
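Aside (not part of the patch series): the duplicate-name check that config_import now performs walks domains, alternatives and relays in a single pass. A standalone sketch of the same pattern, with plain lists standing in for the SQLAlchemy queries:

.. code-block:: python

    # Illustrative only: detect a FQDN that occurs in more than one collection.
    from itertools import chain

    domains = ['example.com']
    alternatives = ['alt.example.com']
    relays = ['example.com']          # duplicate on purpose

    seen = set()
    for fqdn in chain(domains, alternatives, relays):
        if fqdn in seen:
            raise SystemExit(f'[ValidationError] Duplicate domain name: {fqdn}')
        seen.add(fqdn)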
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index dac1dc70..5799e282 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -19,6 +19,7 @@ import dns
from flask import current_app as app
from sqlalchemy.ext import declarative
+from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.inspection import inspect
from werkzeug.utils import cached_property
@@ -121,6 +122,36 @@ class Base(db.Model):
updated_at = db.Column(db.Date, nullable=True, onupdate=date.today)
comment = db.Column(db.String(255), nullable=True, default='')
+ def __str__(self):
+ pkey = self.__table__.primary_key.columns.values()[0].name
+ if pkey == 'email':
+ # ugly hack for email declared attr. _email is not always up2date
+ return str(f'{self.localpart}@{self.domain_name}')
+ elif pkey in {'name', 'email'}:
+ return str(getattr(self, pkey, None))
+ else:
+ return self.__repr__()
+ return str(getattr(self, self.__table__.primary_key.columns.values()[0].name))
+
+ def __repr__(self):
+ return f'<{self.__class__.__name__} {str(self)!r}>'
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ pkey = self.__table__.primary_key.columns.values()[0].name
+ this = getattr(self, pkey, None)
+ other = getattr(other, pkey, None)
+ return this is not None and other is not None and str(this) == str(other)
+ else:
+ return NotImplemented
+
+ def __hash__(self):
+ primary = getattr(self, self.__table__.primary_key.columns.values()[0].name)
+ if primary is None:
+ return NotImplemented
+ else:
+ return hash(primary)
+
# Many-to-many association table for domain managers
managers = db.Table('manager', Base.metadata,
@@ -261,19 +292,6 @@ class Domain(Base):
except dns.exception.DNSException:
return False
- def __str__(self):
- return str(self.name)
-
- def __eq__(self, other):
- if isinstance(other, self.__class__):
- return str(self.name) == str(other.name)
- else:
- return NotImplemented
-
- def __hash__(self):
- return hash(str(self.name))
-
-
class Alternative(Base):
""" Alternative name for a served domain.
@@ -287,9 +305,6 @@ class Alternative(Base):
domain = db.relationship(Domain,
backref=db.backref('alternatives', cascade='all, delete-orphan'))
- def __str__(self):
- return str(self.name)
-
class Relay(Base):
""" Relayed mail domain.
@@ -302,9 +317,6 @@ class Relay(Base):
# TODO: String(80) is too small?
smtp = db.Column(db.String(80), nullable=True)
- def __str__(self):
- return str(self.name)
-
class Email(object):
""" Abstraction for an email address (localpart and domain).
@@ -312,11 +324,11 @@ class Email(object):
# TODO: validate max. total length of address (<=254)
- # TODO: String(80) is too large (>64)?
+ # TODO: String(80) is too large (64)?
localpart = db.Column(db.String(80), nullable=False)
@declarative.declared_attr
- def domain_name(self):
+ def domain_name(cls):
""" the domain part of the email address """
return db.Column(IdnaDomain, db.ForeignKey(Domain.name),
nullable=False, default=IdnaDomain)
@@ -325,13 +337,33 @@ class Email(object):
# It is however very useful for quick lookups without joining tables,
# especially when the mail server is reading the database.
@declarative.declared_attr
- def email(self):
+ def _email(cls):
""" the complete email address (localpart@domain) """
- updater = lambda ctx: '{localpart}@{domain_name}'.format(**ctx.current_parameters)
- return db.Column(IdnaEmail,
- primary_key=True, nullable=False,
- default=updater
- )
+
+ def updater(ctx):
+ key = f'{cls.__tablename__}_email'
+ if key in ctx.current_parameters:
+ return ctx.current_parameters[key]
+ return '{localpart}@{domain_name}'.format(**ctx.current_parameters)
+
+ return db.Column('email', IdnaEmail, primary_key=True, nullable=False, onupdate=updater)
+
+ # We need to keep email, localpart and domain_name in sync.
+ # But IMHO using email as primary key was not a good idea in the first place.
+ @hybrid_property
+ def email(self):
+ """ getter for email - gets _email """
+ return self._email
+
+ @email.setter
+ def email(self, value):
+ """ setter for email - sets _email, localpart and domain_name at once """
+ self.localpart, self.domain_name = value.rsplit('@', 1)
+ self._email = value
+
+ # hack for email declared attr - when _email is not updated yet
+ def __str__(self):
+ return str(f'{self.localpart}@{self.domain_name}')
def sendmail(self, subject, body):
""" send an email to the address """
@@ -391,9 +423,6 @@ class Email(object):
return None
- def __str__(self):
- return str(self.email)
-
class User(Base, Email):
""" A user is an email address that has a password to access a mailbox.
@@ -435,12 +464,10 @@ class User(Base, Email):
is_active = True
is_anonymous = False
- # TODO: remove unused user.get_id()
def get_id(self):
""" return users email address """
return self.email
- # TODO: remove unused user.destination
@property
def destination(self):
""" returns comma separated string of destinations """
@@ -471,17 +498,20 @@ class User(Base, Email):
'CRYPT': 'des_crypt',
}
- def _get_password_context(self):
+ @classmethod
+ def get_password_context(cls):
+ """ Create password context for hashing and verification
+ """
return passlib.context.CryptContext(
- schemes=self.scheme_dict.values(),
- default=self.scheme_dict[app.config['PASSWORD_SCHEME']],
+ schemes=cls.scheme_dict.values(),
+ default=cls.scheme_dict[app.config['PASSWORD_SCHEME']],
)
def check_password(self, plain):
""" Check password against stored hash
Update hash when default scheme has changed
"""
- context = self._get_password_context()
+ context = self.get_password_context()
hashed = re.match('^({[^}]+})?(.*)$', self.password).group(2)
result = context.verify(plain, hashed)
if result and context.identify(hashed) != context.default_scheme():
@@ -490,8 +520,6 @@ class User(Base, Email):
db.session.commit()
return result
- # TODO: remove kwarg hash_scheme - there is no point in setting a scheme,
- # when the next check updates the password to the default scheme.
def set_password(self, new, hash_scheme=None, raw=False):
""" Set password for user with specified encryption scheme
@new: plain text password to encrypt (or, if raw is True: the hash itself)
@@ -500,7 +528,7 @@ class User(Base, Email):
if hash_scheme is None:
hash_scheme = app.config['PASSWORD_SCHEME']
if not raw:
- new = self._get_password_context().encrypt(new, self.scheme_dict[hash_scheme])
+ new = self.get_password_context().encrypt(new, self.scheme_dict[hash_scheme])
self.password = f'{{{hash_scheme}}}{new}'
def get_managed_domains(self):
@@ -593,7 +621,7 @@ class Alias(Base, Email):
return None
-# TODO: what about API tokens?
+
class Token(Base):
""" A token is an application password for a given user.
"""
@@ -606,20 +634,19 @@ class Token(Base):
user = db.relationship(User,
backref=db.backref('tokens', cascade='all, delete-orphan'))
password = db.Column(db.String(255), nullable=False)
- # TODO: String(80) is too large?
+ # TODO: String(255) is too large? (43 should be sufficient)
ip = db.Column(db.String(255))
def check_password(self, password):
""" verifies password against stored hash """
return passlib.hash.sha256_crypt.verify(password, self.password)
- # TODO: use crypt context and default scheme from config?
def set_password(self, password):
""" sets password using sha256_crypt(rounds=1000) """
self.password = passlib.hash.sha256_crypt.using(rounds=1000).hash(password)
- def __str__(self):
- return str(self.comment or self.ip)
+ def __repr__(self):
+ return f'&lt;Token #{self.id}: {self.comment or self.ip}&gt;'
class Fetch(Base):
@@ -644,8 +671,11 @@ class Fetch(Base):
last_check = db.Column(db.DateTime, nullable=True)
error = db.Column(db.String(1023), nullable=True)
- def __str__(self):
- return f'{self.protocol}{"s" if self.tls else ""}://{self.username}@{self.host}:{self.port}'
+ def __repr__(self):
+ return (
+ f'&lt;Fetch #{self.id}: {self.protocol}{"s" if self.tls else ""}://{self.username}@{self.host}:{self.port}&gt;'
+ )
class MailuConfig:
@@ -661,7 +691,7 @@ class MailuConfig:
def __init__(self, model : db.Model):
self.model = model
- def __str__(self):
+ def __repr__(self):
return f'<{self.model.__name__}-Collection>'
@cached_property
@@ -837,8 +867,8 @@ class MailuConfig:
if models is None or model in models:
db.session.query(model).delete()
- domains = MailuCollection(Domain)
- relays = MailuCollection(Relay)
- users = MailuCollection(User)
- aliases = MailuCollection(Alias)
+ domain = MailuCollection(Domain)
+ user = MailuCollection(User)
+ alias = MailuCollection(Alias)
+ relay = MailuCollection(Relay)
config = MailuCollection(Config)
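Aside (not part of the patch series): the new hybrid ``email`` property keeps the stored email column, ``localpart`` and ``domain_name`` in sync whenever the full address is assigned. A minimal sketch of the same pattern on a toy model (SQLAlchemy 1.4+ assumed; class and table names are illustrative, not the Mailu Email mixin):

.. code-block:: python

    # Illustrative only: a toy model showing the synchronised email property.
    from sqlalchemy import Column, String
    from sqlalchemy.ext.hybrid import hybrid_property
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'address'
        _email = Column('email', String(255), primary_key=True)
        localpart = Column(String(80), nullable=False)
        domain_name = Column(String(80), nullable=False)

        @hybrid_property
        def email(self):
            # getter simply exposes the stored column
            return self._email

        @email.setter
        def email(self, value):
            # setter keeps localpart and domain_name in sync with the address
            self.localpart, self.domain_name = value.rsplit('@', 1)
            self._email = value

    addr = Address(email='foo@example.com')
    print(addr.localpart, addr.domain_name, addr.email)  # foo example.com foo@example.com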
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 04512f6d..54a2e928 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -1,27 +1,66 @@
""" Mailu marshmallow fields and schema
"""
-import re
-
+from copy import deepcopy
from collections import OrderedDict
from textwrap import wrap
+import re
+import json
import yaml
+import sqlalchemy
+
from marshmallow import pre_load, post_load, post_dump, fields, Schema
+from marshmallow.utils import ensure_text_type
from marshmallow.exceptions import ValidationError
from marshmallow_sqlalchemy import SQLAlchemyAutoSchemaOpts
+
from flask_marshmallow import Marshmallow
+
from OpenSSL import crypto
+try:
+ from pygments import highlight
+ from pygments.token import Token
+ from pygments.lexers import get_lexer_by_name
+ from pygments.lexers.data import YamlLexer
+ from pygments.formatters import get_formatter_by_name
+except ModuleNotFoundError:
+ COLOR_SUPPORTED = False
+else:
+ COLOR_SUPPORTED = True
+
from . import models, dkim
ma = Marshmallow()
-# TODO: how and where to mark keys as "required" while unserializing (on commandline, in api)?
-# - fields without default => required
-# - fields which are the primary key => unchangeable when updating
+# TODO: how and where to mark keys as "required" while unserializing in api?
+# - when modifying, nothing is required (only the primary key, but this key is in the uri)
+# - the primary key from post data must not differ from the key in the uri
+# - when creating all fields without default or auto-increment are required
+# TODO: what about deleting list items and pruning lists?
+# - domain.alternatives, user.forward_destination, user.manager_of, aliases.destination
+# TODO: validate everything!
+
+
+### class for hidden values ###
+
+class _Hidden:
+ def __bool__(self):
+ return False
+ def __copy__(self):
+ return self
+ def __deepcopy__(self, _):
+ return self
+ def __eq__(self, other):
+ return str(other) == '&lt;hidden&gt;'
+ def __repr__(self):
+ return '&lt;hidden&gt;'
+ __str__ = __repr__
+
+HIDDEN = _Hidden()
### map model to schema ###
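Aside (not part of the patch series): a small sketch of how the HIDDEN sentinel defined above behaves once it stands in for masked secrets (assumes the admin package is importable):

.. code-block:: python

    # Illustrative only: HIDDEN is falsy, copy-stable and prints its own marker.
    from copy import deepcopy
    from mailu.schemas import HIDDEN

    assert not HIDDEN                   # masked values are falsy
    assert deepcopy(HIDDEN) is HIDDEN   # copies return the very same sentinel
    print(f'dkim_key: {HIDDEN}')        # prints the sentinel's textual form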
@@ -41,13 +80,90 @@ def mapped(cls):
return cls
-### yaml render module ###
+### helper functions ###
+
+def get_fieldspec(exc):
+ """ walk traceback to extract spec of invalid field from marshmallow """
+ path = []
+ tbck = exc.__traceback__
+ while tbck:
+ if tbck.tb_frame.f_code.co_name == '_serialize':
+ if 'attr' in tbck.tb_frame.f_locals:
+ path.append(tbck.tb_frame.f_locals['attr'])
+ elif tbck.tb_frame.f_code.co_name == '_init_fields':
+ path = '.'.join(path)
+ spec = ', '.join([f'{path}.{key}' for key in tbck.tb_frame.f_locals['invalid_fields']])
+ return spec
+ tbck = tbck.tb_next
+ return None
+
+def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
+ """ add ANSI color to data """
+ if color is None:
+ # autodetect colorize
+ color = COLOR_SUPPORTED
+ if not color:
+ # no color wanted
+ return data
+ if not COLOR_SUPPORTED:
+ # want color, but not supported
+ raise ValueError('Please install pygments to colorize output')
+
+ scheme = {
+ Token: ('', ''),
+ Token.Name.Tag: ('cyan', 'brightcyan'),
+ Token.Literal.Scalar: ('green', 'green'),
+ Token.Literal.String: ('green', 'green'),
+ Token.Keyword.Constant: ('magenta', 'brightmagenta'),
+ Token.Literal.Number: ('magenta', 'brightmagenta'),
+ Token.Error: ('red', 'brightred'),
+ Token.Name: ('red', 'brightred'),
+ Token.Operator: ('red', 'brightred'),
+ }
+
+ class MyYamlLexer(YamlLexer):
+ """ colorize yaml constants and integers """
+ def get_tokens(self, text, unfiltered=False):
+ for typ, value in super().get_tokens(text, unfiltered):
+ if typ is Token.Literal.Scalar.Plain:
+ if value in {'true', 'false', 'null'}:
+ typ = Token.Keyword.Constant
+ elif value == HIDDEN:
+ typ = Token.Error
+ else:
+ try:
+ int(value, 10)
+ except ValueError:
+ try:
+ float(value)
+ except ValueError:
+ pass
+ else:
+ typ = Token.Literal.Number.Float
+ else:
+ typ = Token.Literal.Number.Integer
+ yield typ, value
+
+ res = highlight(
+ data,
+ MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer),
+ get_formatter_by_name(formatter, colorscheme=scheme)
+ )
+
+ return res.rstrip('\n') if strip else res
+
+
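Aside (not part of the patch series): a hedged usage sketch of the ``colorize()`` helper above; it assumes pygments is installed and the admin package is importable:

.. code-block:: python

    # Illustrative only: colorize a small YAML document, or pass it through.
    from mailu.schemas import colorize

    doc = 'domain:\n  - name: example.com\n    max_users: 10\n'
    print(colorize(doc, lexer='yaml', color=True))   # ANSI-colored YAML
    print(colorize(doc, color=False))                # returned unchanged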
+### render modules ###
# allow yaml module to dump OrderedDict
yaml.add_representer(
OrderedDict,
lambda cls, data: cls.represent_mapping('tag:yaml.org,2002:map', data.items())
)
+yaml.add_representer(
+ _Hidden,
+ lambda cls, data: cls.represent_data(str(data))
+)
class RenderYAML:
""" Marshmallow YAML Render Module
@@ -67,19 +183,19 @@ class RenderYAML:
return super().increase_indent(flow, False)
@staticmethod
- def _update_items(dict1, dict2):
- """ sets missing keys in dict1 to values of dict2
+ def _augment(kwargs, defaults):
+ """ add default kv's to kwargs if missing
"""
- for key, value in dict2.items():
- if key not in dict1:
- dict1[key] = value
+ for key, value in defaults.items():
+ if key not in kwargs:
+ kwargs[key] = value
_load_defaults = {}
@classmethod
def loads(cls, *args, **kwargs):
""" load yaml data from string
"""
- cls._update_items(kwargs, cls._load_defaults)
+ cls._augment(kwargs, cls._load_defaults)
return yaml.safe_load(*args, **kwargs)
_dump_defaults = {
@@ -90,13 +206,52 @@ class RenderYAML:
}
@classmethod
def dumps(cls, *args, **kwargs):
- """ dump yaml data to string
+ """ dump data to yaml string
"""
- cls._update_items(kwargs, cls._dump_defaults)
+ cls._augment(kwargs, cls._dump_defaults)
return yaml.dump(*args, **kwargs)
+class JSONEncoder(json.JSONEncoder):
+ """ JSONEncoder supporting serialization of HIDDEN """
+ def default(self, o):
+ """ serialize HIDDEN """
+ if isinstance(o, _Hidden):
+ return str(o)
+ return json.JSONEncoder.default(self, o)
-### field definitions ###
+class RenderJSON:
+ """ Marshmallow JSON Render Module
+ """
+
+ @staticmethod
+ def _augment(kwargs, defaults):
+ """ add default kv's to kwargs if missing
+ """
+ for key, value in defaults.items():
+ if key not in kwargs:
+ kwargs[key] = value
+
+ _load_defaults = {}
+ @classmethod
+ def loads(cls, *args, **kwargs):
+ """ load json data from string
+ """
+ cls._augment(kwargs, cls._load_defaults)
+ return json.loads(*args, **kwargs)
+
+ _dump_defaults = {
+ 'separators': (',',':'),
+ 'cls': JSONEncoder,
+ }
+ @classmethod
+ def dumps(cls, *args, **kwargs):
+ """ dump data to json string
+ """
+ cls._augment(kwargs, cls._dump_defaults)
+ return json.dumps(*args, **kwargs)
+
+
+### custom fields ###
class LazyStringField(fields.String):
""" Field that serializes a "false" value to the empty string
@@ -107,9 +262,8 @@ class LazyStringField(fields.String):
"""
return value if value else ''
-
class CommaSeparatedListField(fields.Raw):
- """ Field that deserializes a string containing comma-separated values to
+ """ Deserialize a string containing comma-separated values to
a list of strings
"""
@@ -129,10 +283,15 @@ class CommaSeparatedListField(fields.Raw):
class DkimKeyField(fields.String):
- """ Field that serializes a dkim key to a list of strings (lines) and
- deserializes a string or list of strings.
+ """ Serialize a dkim key to a list of strings (lines) and
+ Deserialize a string or list of strings to a valid dkim key
"""
+ default_error_messages = {
+ "invalid": "Not a valid string or list.",
+ "invalid_utf8": "Not a valid utf-8 string or list.",
+ }
+
_clean_re = re.compile(
r'(^-----BEGIN (RSA )?PRIVATE KEY-----|-----END (RSA )?PRIVATE KEY-----$|\s+)',
flags=re.UNICODE
@@ -156,11 +315,19 @@ class DkimKeyField(fields.String):
# convert list to str
if isinstance(value, list):
- value = ''.join(value)
+ try:
+ value = ''.join([ensure_text_type(item) for item in value])
+ except UnicodeDecodeError as exc:
+ raise self.make_error("invalid_utf8") from exc
- # only strings are allowed
- if not isinstance(value, str):
- raise ValidationError(f'invalid type {type(value).__name__!r}')
+ # only text is allowed
+ else:
+ if not isinstance(value, (str, bytes)):
+ raise self.make_error("invalid")
+ try:
+ value = ensure_text_type(value)
+ except UnicodeDecodeError as exc:
+ raise self.make_error("invalid_utf8") from exc
# clean value (remove whitespace and header/footer)
value = self._clean_re.sub('', value.strip())
@@ -189,28 +356,53 @@ class DkimKeyField(fields.String):
else:
return value
-
-### base definitions ###
-
-def handle_email(data):
- """ merge separate localpart and domain to email
+class PasswordField(fields.Str):
+ """ Serialize a hashed password hash by stripping the obsolete {SCHEME}
+ Deserialize a plain password or hashed password into a hashed password
"""
- localpart = 'localpart' in data
- domain = 'domain' in data
+ _hashes = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'}
- if 'email' in data:
- if localpart or domain:
- raise ValidationError('duplicate email and localpart/domain')
- data['localpart'], data['domain_name'] = data['email'].rsplit('@', 1)
- elif localpart and domain:
- data['domain_name'] = data['domain']
- del data['domain']
- data['email'] = f'{data["localpart"]}@{data["domain_name"]}'
- elif localpart or domain:
- raise ValidationError('incomplete localpart/domain')
+ def _serialize(self, value, attr, obj, **kwargs):
+ """ strip obsolete {password-hash} when serializing """
+ # strip scheme spec if in database - it's obsolete
+ if value.startswith('{') and (end := value.find('}', 1)) >= 0:
+ if value[1:end] in self._hashes:
+ return value[end+1:]
+ return value
- return data
+ def _deserialize(self, value, attr, data, **kwargs):
+ """ hashes plain password or checks hashed password
+ also strips obsolete {password-hash} when deserializing
+ """
+
+ # when hashing is requested: use model instance to hash plain password
+ if data.get('hash_password'):
+ # hash password using model instance
+ inst = self.metadata['model']()
+ inst.set_password(value)
+ value = inst.password
+ del inst
+
+ # strip scheme spec when specified - it's obsolete
+ if value.startswith('{') and (end := value.find('}', 1)) >= 0:
+ if value[1:end] in self._hashes:
+ value = value[end+1:]
+
+ # check if algorithm is supported
+ inst = self.metadata['model'](password=value)
+ try:
+ # just check against empty string to see if hash is valid
+ inst.check_password('')
+ except ValueError as exc:
+ # ValueError: hash could not be identified
+ raise ValidationError(f'invalid password hash {value!r}') from exc
+ del inst
+
+ return value
+
+
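Aside (not part of the patch series): the obsolete ``{SCHEME}`` prefix handling performed by ``PasswordField._serialize`` above, restated as a standalone function (the hash values are made up):

.. code-block:: python

    # Illustrative only: drop a known "{SCHEME}" prefix from a stored hash.
    KNOWN_SCHEMES = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'}

    def strip_scheme(value: str) -> str:
        if value.startswith('{') and (end := value.find('}', 1)) >= 0:
            if value[1:end] in KNOWN_SCHEMES:
                return value[end + 1:]
        return value

    assert strip_scheme('{SHA512-CRYPT}$6$salt$hash') == '$6$salt$hash'
    assert strip_scheme('$2b$12$already-plain') == '$2b$12$already-plain'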
+### base schema ###
class BaseOpts(SQLAlchemyAutoSchemaOpts):
""" Option class with sqla session
@@ -220,6 +412,8 @@ class BaseOpts(SQLAlchemyAutoSchemaOpts):
meta.sqla_session = models.db.session
if not hasattr(meta, 'ordered'):
meta.ordered = True
+ if not hasattr(meta, 'sibling'):
+ meta.sibling = False
super(BaseOpts, self).__init__(meta, ordered=ordered)
class BaseSchema(ma.SQLAlchemyAutoSchema):
@@ -231,10 +425,15 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
class Meta:
""" Schema config """
+ include_by_context = {}
+ exclude_by_value = {}
+ hide_by_context = {}
+ order = []
+ sibling = False
def __init__(self, *args, **kwargs):
- # context?
+ # get context
context = kwargs.get('context', {})
flags = {key for key, value in context.items() if value is True}
@@ -261,7 +460,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# exclude default values
if not context.get('full'):
- for column in getattr(self.opts, 'model').__table__.columns:
+ for column in self.opts.model.__table__.columns:
if column.name not in exclude:
self._exclude_by_value.setdefault(column.name, []).append(
None if column.default is None else column.default.arg
@@ -274,45 +473,239 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if not flags & set(need):
self._hide_by_context |= set(what)
+ # remember primary keys
+ self._primary = self.opts.model.__table__.primary_key.columns.values()[0].name
+
# initialize attribute order
if hasattr(self.Meta, 'order'):
# use user-defined order
- self._order = list(reversed(getattr(self.Meta, 'order')))
+ self._order = list(reversed(self.Meta.order))
else:
# default order is: primary_key + other keys alphabetically
self._order = list(sorted(self.fields.keys()))
- primary = self.opts.model.__table__.primary_key.columns.values()[0].name
- if primary in self._order:
- self._order.remove(primary)
+ if self._primary in self._order:
+ self._order.remove(self._primary)
self._order.reverse()
- self._order.append(primary)
+ self._order.append(self._primary)
# move pre_load hook "_track_import" to the front
hooks = self._hooks[('pre_load', False)]
- if '_track_import' in hooks:
- hooks.remove('_track_import')
- hooks.insert(0, '_track_import')
- # and post_load hook "_fooo" to the end
+ hooks.remove('_track_import')
+ hooks.insert(0, '_track_import')
+ # move pre_load hook "_add_instance" to the end
+ hooks.remove('_add_required')
+ hooks.append('_add_required')
+
+ # move post_load hook "_add_instance" to the end
hooks = self._hooks[('post_load', False)]
- if '_add_instance' in hooks:
- hooks.remove('_add_instance')
- hooks.append('_add_instance')
+ hooks.remove('_add_instance')
+ hooks.append('_add_instance')
+
+ def hide(self, data):
+ """ helper method to hide input data for logging """
+ # always returns a copy of data
+ return {
+ key: HIDDEN if key in self._hide_by_context else deepcopy(value)
+ for key, value in data.items()
+ }
+
+ def _call_and_store(self, *args, **kwargs):
+ """ track curent parent field for pruning """
+ self.context['parent_field'] = kwargs['field_name']
+ return super()._call_and_store(*args, **kwargs)
+
+ # this is only needed to work around the declared attr "email" primary key in model
+ def get_instance(self, data):
+ """ lookup item by defined primary key instead of key(s) from model """
+ if self.transient:
+ return None
+ if keys := getattr(self.Meta, 'primary_keys', None):
+ filters = {key: data.get(key) for key in keys}
+ if None not in filters.values():
+ return self.session.query(self.opts.model).filter_by(**filters).first()
+ return super().get_instance(data)
+
+ @pre_load(pass_many=True)
+ def _patch_input(self, items, many, **kwargs): # pylint: disable=unused-argument
+ """ - flush sqla session before serializing a section when requested
+ (make sure all objects that could be referred to later are created)
+ - when in update mode: patch input data before deserialization
+ - handle "prune" and "delete" items
+ - replace values in keys starting with '-' with default
+ """
+
+ # flush sqla session
+ if not self.Meta.sibling:
+ self.opts.sqla_session.flush()
+
+ # stop early when not updating
+ if not self.context.get('update'):
+ return items
+
+ # patch "delete", "prune" and "default"
+ want_prune = []
+ def patch(count, data, prune):
+
+ # don't allow __delete__ coming from input
+ if '__delete__' in data:
+ raise ValidationError('Unknown field.', f'{count}.__delete__')
+
+ # handle "prune list" and "delete item" (-pkey: none and -pkey: id)
+ for key in data:
+ if key.startswith('-'):
+ if key[1:] == self._primary:
+ # delete or prune
+ if data[key] is None:
+ # prune
+ prune.append(True)
+ return None
+ # mark item for deletion
+ return {key[1:]: data[key], '__delete__': True}
+
+ # handle "set to default value" (-key: none)
+ def set_default(key, value):
+ if not key.startswith('-'):
+ return (key, value)
+ key = key[1:]
+ if not key in self.opts.model.__table__.columns:
+ return (key, None)
+ if value is not None:
+ raise ValidationError(
+ 'When resetting to default value must be null.',
+ f'{count}.{key}'
+ )
+ value = self.opts.model.__table__.columns[key].default
+ if value is None:
+ raise ValidationError(
+ 'Field has no default value.',
+ f'{count}.{key}'
+ )
+ return (key, value.arg)
+
+ return dict([set_default(key, value) for key, value in data.items()])
+
+ # convert items to "delete" and filter "prune" item
+ items = [
+ item for item in [
+ patch(count, item, want_prune) for count, item in enumerate(items)
+ ] if item
+ ]
+
+ # prune: determine if existing items in db need to be added or marked for deletion
+ add_items = False
+ del_items = False
+ if self.Meta.sibling:
+ # parent prunes automatically
+ if not want_prune:
+ # no prune requested => add old items
+ add_items = True
+ else:
+ # parent does not prune automatically
+ if want_prune:
+ # prune requested => mark old items for deletion
+ del_items = True
+
+ if add_items or del_items:
+ existing = {item[self._primary] for item in items if self._primary in item}
+ for item in getattr(self.context['parent'], self.context['parent_field']):
+ key = getattr(item, self._primary)
+ if key not in existing:
+ if add_items:
+ items.append({self._primary: key})
+ else:
+ items.append({self._primary: key, '__delete__': True})
+
+ return items
@pre_load
def _track_import(self, data, many, **kwargs): # pylint: disable=unused-argument
-# TODO: also handle reset, prune and delete in pre_load / post_load hooks!
-# print('!!!', repr(data))
+ """ call callback function to track import
+ """
+ # callback
if callback := self.context.get('callback'):
callback(self, data)
+
return data
- @post_load
- def _add_instance(self, item, many, **kwargs): # pylint: disable=unused-argument
- self.opts.sqla_session.add(item)
+ @pre_load
+ def _add_required(self, data, many, **kwargs): # pylint: disable=unused-argument
+ """ when updating:
+ allow modification of existing items having required attributes
+ by loading existing value from db
+ """
+
+ if not self.opts.load_instance or not self.context.get('update'):
+ return data
+
+ # stabilize import of auto-increment primary keys (not required),
+ # by matching import data to existing items and setting primary key
+ if not self._primary in data:
+ for item in getattr(self.context['parent'], self.context['parent_field']):
+ existing = self.dump(item, many=False)
+ this = existing.pop(self._primary)
+ if data == existing:
+ instance = item
+ data[self._primary] = this
+ break
+
+ # try to load instance
+ instance = self.instance or self.get_instance(data)
+ if instance is None:
+
+ if '__delete__' in data:
+ # deletion of non-existent item requested
+ raise ValidationError(
+ f'item not found: {data[self._primary]!r}',
+ field_name=f'?.{self._primary}',
+ )
+
+ else:
+
+ if self.context.get('update'):
+ # remember instance as parent for pruning siblings
+ if not self.Meta.sibling:
+ self.context['parent'] = instance
+ # delete instance when marked
+ if '__delete__' in data:
+ self.opts.sqla_session.delete(instance)
+
+ # add attributes required for validation from db
+ # TODO: this will cause validation errors if value from database does not validate
+ for attr_name, field_obj in self.load_fields.items():
+ if field_obj.required and attr_name not in data:
+ data[attr_name] = getattr(instance, attr_name)
+
+ return data
+
+ @post_load(pass_original=True)
+ def _add_instance(self, item, original, many, **kwargs): # pylint: disable=unused-argument
+ """ add new instances to sqla session """
+
+ if item in self.opts.sqla_session:
+ # item was modified
+ if 'hash_password' in original:
+ # stabilize import of passwords to be hashed,
+ # by not re-hashing an unchanged password
+ if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None):
+ if attr.history.has_changes() and attr.history.deleted:
+ try:
+ # reset password hash, if password was not changed
+ inst = type(item)(password=attr.history.deleted[-1])
+ if inst.check_password(original['password']):
+ item.password = inst.password
+ except ValueError:
+ # hash in db is invalid
+ pass
+ else:
+ del inst
+ else:
+ # new item
+ self.opts.sqla_session.add(item)
return item
@post_dump
def _hide_and_order(self, data, many, **kwargs): # pylint: disable=unused-argument
+ """ hide secrets and order output """
# order output
for key in self._order:
@@ -325,15 +718,18 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if not self._exclude_by_value and not self._hide_by_context:
return data
- # exclude items or hide values
+ # exclude or hide values
full = self.context.get('full')
return type(data)([
- (key, '' if key in self._hide_by_context else value)
+ (key, HIDDEN if key in self._hide_by_context else value)
for key, value in data.items()
if full or key not in self._exclude_by_value or value not in self._exclude_by_value[key]
])
- # TODO: remove LazyStringField and change model (IMHO comment should not be nullable)
+ # this field is used to mark items for deletion
+ mark_delete = fields.Boolean(data_key='__delete__', load_only=True)
+
+ # TODO: remove LazyStringField (when model was changed - IMHO comment should not be nullable)
comment = LazyStringField()
@@ -381,6 +777,11 @@ class TokenSchema(BaseSchema):
model = models.Token
load_instance = True
+ sibling = True
+
+ password = PasswordField(required=True, metadata={'model': models.User})
+ hash_password = fields.Boolean(load_only=True, missing=False)
+
@mapped
class FetchSchema(BaseSchema):
@@ -389,6 +790,8 @@ class FetchSchema(BaseSchema):
""" Schema config """
model = models.Fetch
load_instance = True
+
+ sibling = True
include_by_context = {
('full', 'import'): {'last_check', 'error'},
}
@@ -405,52 +808,25 @@ class UserSchema(BaseSchema):
model = models.User
load_instance = True
include_relationships = True
- exclude = ['domain', 'quota_bytes_used']
+ exclude = ['_email', 'domain', 'localpart', 'domain_name', 'quota_bytes_used']
+ primary_keys = ['email']
exclude_by_value = {
'forward_destination': [[]],
- 'tokens': [[]],
- 'fetches': [[]],
- 'manager_of': [[]],
- 'reply_enddate': ['2999-12-31'],
- 'reply_startdate': ['1900-01-01'],
+ 'tokens': [[]],
+ 'fetches': [[]],
+ 'manager_of': [[]],
+ 'reply_enddate': ['2999-12-31'],
+ 'reply_startdate': ['1900-01-01'],
}
- @pre_load
- def _handle_email_and_password(self, data, many, **kwargs): # pylint: disable=unused-argument
- data = handle_email(data)
- if 'password' in data:
- if 'password_hash' in data or 'hash_scheme' in data:
- raise ValidationError('ambigous key password and password_hash/hash_scheme')
- # check (hashed) password
- password = data['password']
- if password.startswith('{') and '}' in password:
- scheme = password[1:password.index('}')]
- if scheme not in self.Meta.model.scheme_dict:
- raise ValidationError(f'invalid password scheme {scheme!r}')
- else:
- raise ValidationError(f'invalid hashed password {password!r}')
- elif 'password_hash' in data and 'hash_scheme' in data:
- if data['hash_scheme'] not in self.Meta.model.scheme_dict:
- raise ValidationError(f'invalid password scheme {data["hash_scheme"]!r}')
- data['password'] = f'{{{data["hash_scheme"]}}}{data["password_hash"]}'
- del data['hash_scheme']
- del data['password_hash']
- return data
-
- # TODO: verify password (should this be done in model?)
- # scheme, hashed = re.match('^(?:{([^}]+)})?(.*)$', self.password).groups()
- # if not scheme...
- # ctx = passlib.context.CryptContext(schemes=[scheme], default=scheme)
- # try:
- # ctx.verify('', hashed)
- # =>? ValueError: hash could not be identified
-
- localpart = fields.Str(load_only=True)
- domain_name = fields.Str(load_only=True)
+ email = fields.String(required=True)
tokens = fields.Nested(TokenSchema, many=True)
fetches = fields.Nested(FetchSchema, many=True)
+ password = PasswordField(required=True, metadata={'model': models.User})
+ hash_password = fields.Boolean(load_only=True, missing=False)
+
@mapped
class AliasSchema(BaseSchema):
@@ -459,18 +835,14 @@ class AliasSchema(BaseSchema):
""" Schema config """
model = models.Alias
load_instance = True
- exclude = ['domain']
+ exclude = ['_email', 'domain', 'localpart', 'domain_name']
+ primary_keys = ['email']
exclude_by_value = {
'destination': [[]],
}
- @pre_load
- def _handle_email(self, data, many, **kwargs): # pylint: disable=unused-argument
- return handle_email(data)
-
- localpart = fields.Str(load_only=True)
- domain_name = fields.Str(load_only=True)
+ email = fields.String(required=True)
destination = CommaSeparatedListField()
@@ -499,7 +871,7 @@ class MailuSchema(Schema):
render_module = RenderYAML
ordered = True
- order = ['config', 'domains', 'users', 'aliases', 'relays']
+ order = ['domain', 'user', 'alias', 'relay'] # 'config'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -511,6 +883,14 @@ class MailuSchema(Schema):
except KeyError:
pass
+ def _call_and_store(self, *args, **kwargs):
+ """ track current parent and field for pruning """
+ self.context.update({
+ 'parent': self.context.get('config'),
+ 'parent_field': kwargs['field_name'],
+ })
+ return super()._call_and_store(*args, **kwargs)
+
@pre_load
def _clear_config(self, data, many, **kwargs): # pylint: disable=unused-argument
""" create config object in context if missing
@@ -534,8 +914,8 @@ class MailuSchema(Schema):
return config
- config = fields.Nested(ConfigSchema, many=True)
- domains = fields.Nested(DomainSchema, many=True)
- users = fields.Nested(UserSchema, many=True)
- aliases = fields.Nested(AliasSchema, many=True)
- relays = fields.Nested(RelaySchema, many=True)
+ domain = fields.Nested(DomainSchema, many=True)
+ user = fields.Nested(UserSchema, many=True)
+ alias = fields.Nested(AliasSchema, many=True)
+ relay = fields.Nested(RelaySchema, many=True)
+# config = fields.Nested(ConfigSchema, many=True)
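Aside (not part of the patch series): a rough sketch of the update-mode markers handled by the ``_patch_input`` hook above (and hinted at by the TODO removed from manage.py). They only take effect when ``config-import`` is run with ``--update``:

.. code-block:: python

    # Illustrative only: "-<key>" markers as they would appear in import data.
    UPDATE_EXAMPLE = """\
    alias:
      - -email: spam@example.com   # "-<primary key>: <value>" marks this alias for deletion
    relay:
      - -name: null                # "-<primary key>: null" prunes relays missing from the import
    """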
diff --git a/docs/cli.rst b/docs/cli.rst
index 1b2ed14f..497cdfc5 100644
--- a/docs/cli.rst
+++ b/docs/cli.rst
@@ -10,8 +10,9 @@ Managing users and aliases can be done from CLI using commands:
* user
* user-import
* user-delete
-* config-dump
* config-update
+* config-export
+* config-import
alias
-----
@@ -69,104 +70,160 @@ user-delete
docker-compose exec admin flask mailu user-delete foo@example.net
-config-dump
------------
-
-The purpose of this command is to dump domain-, relay-, alias- and user-configuration to a YAML template.
-
-.. code-block:: bash
-
- # docker-compose exec admin flask mailu config-dump --help
-
- Usage: flask mailu config-dump [OPTIONS] [SECTIONS]...
-
- dump configuration as YAML-formatted data to stdout
-
- SECTIONS can be: domains, relays, users, aliases
-
- Options:
- -f, --full Include default attributes
- -s, --secrets Include secrets (dkim-key, plain-text / not hashed)
- -d, --dns Include dns records
- --help Show this message and exit.
-
-If you want to export secrets (dkim-keys, plain-text / not hashed) you have to add the ``--secrets`` option.
-Only non-default attributes are dumped. If you want to dump all attributes use ``--full``.
-To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option.
-Unless you specify some sections all sections are dumped by default.
-
-.. code-block:: bash
-
- docker-compose exec admin flask mailu config-dump > mail-config.yml
-
- docker-compose exec admin flask mailu config-dump --dns domains
-
config-update
-------------
-The purpose of this command is for importing domain-, relay-, alias- and user-configuration in bulk and synchronizing DB entries with an external YAML template.
+The sole purpose of this command is to import users/aliases in bulk and synchronize DB entries with an external YAML template:
.. code-block:: bash
- # docker-compose exec admin flask mailu config-update --help
+ cat mail-config.yml | docker-compose exec -T admin flask mailu config-update --delete-objects
- Usage: flask mailu config-update [OPTIONS]
+where mail-config.yml looks like:
- sync configuration with data from YAML-formatted stdin
+.. code-block:: bash
- Options:
- -v, --verbose Increase verbosity
- -d, --delete-objects Remove objects not included in yaml
- -n, --dry-run Perform a trial run with no changes made
- --help Show this message and exit.
+ users:
+ - localpart: foo
+ domain: example.com
+ password_hash: klkjhumnzxcjkajahsdqweqqwr
+ hash_scheme: MD5-CRYPT
+ aliases:
+ - localpart: alias1
+ domain: example.com
+ destination: "user1@example.com,user2@example.com"
+
+Without the ``--delete-objects`` option config-update will only add/update new values but will *not* remove any entries missing from the provided YAML input.
+
+Users
+-----
+
+following are additional parameters that could be defined for users:
+
+* comment
+* quota_bytes
+* global_admin
+* enable_imap
+* enable_pop
+* forward_enabled
+* forward_destination
+* reply_enabled
+* reply_subject
+* reply_body
+* displayed_name
+* spam_enabled
+* spam_threshold
+
+Alias
+-----
+
+additional fields:
+
+* wildcard
+
+config-export
+-------------
+
+The purpose of this command is to export domain-, relay-, alias- and user-configuration in YAML or JSON format.
+
+.. code-block:: bash
+
+ # docker-compose exec admin flask mailu config-export --help
+
+ Usage: flask mailu config-export [OPTIONS] [FILTER]...
+
+ Export configuration as YAML or JSON to stdout or file
+
+ Options:
+ -f, --full Include attributes with default value.
+ -s, --secrets Include secret attributes (dkim-key, passwords).
+ -c, --color Force colorized output.
+ -d, --dns Include dns records.
+ -o, --output-file FILENAME Save configuration to file.
+ -j, --json Export configuration in json format.
+ -?, -h, --help Show this message and exit.
+
+Only non-default attributes are exported. If you want to export all attributes use ``--full``.
+If you want to export plain-text secrets (dkim-keys, passwords) you have to add the ``--secrets`` option.
+To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option.
+By default all configuration objects are exported (domain, user, alias, relay). You can specify
+filters to export only some objects or attributes (try: ``user`` or ``domain.name``).
+
+.. code-block:: bash
+
+ docker-compose exec admin flask mailu config-export -o mail-config.yml
+
+ docker-compose exec admin flask mailu config-export --dns domain.dns_mx domain.dns_spf
+
+config-import
+-------------
+
+The purpose of this command is to import domain-, relay-, alias- and user-configuration in bulk and synchronize DB entries with an external YAML/JSON source.
+
+.. code-block:: bash
+
+ # docker-compose exec admin flask mailu config-import --help
+
+ Usage: flask mailu config-import [OPTIONS] [FILENAME|-]
+
+ Import configuration as YAML or JSON from stdin or file
+
+ Options:
+ -v, --verbose Increase verbosity.
+ -s, --secrets Show secret attributes in messages.
+ -q, --quiet Quiet mode - only show errors.
+ -c, --color Force colorized output.
+ -u, --update Update mode - merge input with existing config.
+ -n, --dry-run Perform a trial run with no changes made.
+ -?, -h, --help Show this message and exit.
The current version of docker-compose exec does not pass stdin correctly, so you have to use docker exec instead:
.. code-block:: bash
- docker exec -i $(docker-compose ps -q admin) flask mailu config-update -nvd < mail-config.yml
+ docker exec -i $(docker-compose ps -q admin) flask mailu config-import -nv < mail-config.yml
-
-mail-config.yml looks like this:
+mail-config.yml contains the configuration and looks like this:
.. code-block:: yaml
-
- domains:
+
+ domain:
- name: example.com
alternatives:
- alternative.example.com
- users:
+ user:
- email: foo@example.com
- password_hash: klkjhumnzxcjkajahsdqweqqwr
+ password_hash: '$2b$12$...'
hash_scheme: MD5-CRYPT
- aliases:
+ alias:
- email: alias1@example.com
- destination: "user1@example.com,user2@example.com"
+ destination:
+ - user1@example.com
+ - user2@example.com
- relays:
+ relay:
- name: relay.example.com
comment: test
smtp: mx.example.com
-You can use ``--dry-run`` to test your YAML without comitting any changes to the database.
-With ``--verbose`` config-update will show exactly what it changes in the database.
-Without ``--delete-object`` option config-update will only add/update changed values but will *not* remove any entries missing in provided YAML input.
+config-import shows the number of created/modified/deleted objects after import.
+To suppress all messages except error messages use ``--quiet``.
+By adding the ``--verbose`` switch (one or more times) the import gets more detailed and shows exactly what attributes changed.
+In all messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to show secrets.
+If you want to test what would be done when importing use ``--dry-run``.
+By default config-import replaces the whole configuration. You can use ``--update`` to change the existing configuration instead.
+When updating you can add new and change existing objects.
+To delete an object use ``-key: value`` (To delete the domain example.com ``-name: example.com`` for example).
+To reset an attribute to default use ``-key: null`` (To reset enable_imap ``-enable_imap: null`` for example).
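
For illustration, a minimal sketch (not part of the patch) of how such an ``--update`` document parses with PyYAML; the domain name and attribute are made up, and it is config-import that gives the ``-`` prefixes their meaning:

.. code-block:: python

    import yaml

    # hypothetical --update input: delete one domain, reset one user attribute
    update_doc = """
    domain:
      - -name: example.org        # '-name: <value>' deletes this domain object
    user:
      - email: foo@example.com
        -enable_imap: null        # '-enable_imap: null' resets the attribute
    """
    print(yaml.safe_load(update_doc))
    # {'domain': [{'-name': 'example.org'}],
    #  'user': [{'email': 'foo@example.com', '-enable_imap': None}]}
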
-This is a complete YAML template with all additional parameters that could be defined:
+This is a complete YAML template with all additional parameters that can be defined:
.. code-block:: yaml
- aliases:
- - email: email@example.com
- comment: ''
- destination:
- - address@example.com
- wildcard: false
-
- domains:
+ domain:
- name: example.com
alternatives:
- alternative.tld
@@ -176,13 +233,8 @@ This is a complete YAML template with all additional parameters that could be de
max_quota_bytes: 0
max_users: -1
signup_enabled: false
-
- relays:
- - name: relay.example.com
- comment: ''
- smtp: mx.example.com
-
- users:
+
+ user:
- email: postmaster@example.com
comment: ''
displayed_name: 'Postmaster'
@@ -192,13 +244,16 @@ This is a complete YAML template with all additional parameters that could be de
fetches:
- id: 1
comment: 'test fetch'
- username: fetch-user
+ error: null
host: other.example.com
+ keep: true
+ last_check: '2020-12-29T17:09:48.200179'
password: 'secret'
+ hash_password: true
port: 993
protocol: imap
tls: true
- keep: true
+ username: fetch-user
forward_destination:
- address@remote.example.com
forward_enabled: true
@@ -206,12 +261,13 @@ This is a complete YAML template with all additional parameters that could be de
global_admin: true
manager_of:
- example.com
- password: '{BLF-CRYPT}$2b$12$...'
+ password: '$2b$12$...'
+ hash_password: true
quota_bytes: 1000000000
reply_body: ''
reply_enabled: false
- reply_enddate: 2999-12-31
- reply_startdate: 1900-01-01
+ reply_enddate: '2999-12-31'
+ reply_startdate: '1900-01-01'
reply_subject: ''
spam_enabled: true
spam_threshold: 80
@@ -219,5 +275,16 @@ This is a complete YAML template with all additional parameters that could be de
- id: 1
comment: email-client
ip: 192.168.1.1
- password: '$5$rounds=1000$...'
+ password: '$5$rounds=1$...'
+ aliases:
+ - email: email@example.com
+ comment: ''
+ destination:
+ - address@example.com
+ wildcard: false
+
+ relay:
+ - name: relay.example.com
+ comment: ''
+ smtp: mx.example.com
diff --git a/tests/compose/core/02_forward_test.sh b/tests/compose/core/02_forward_test.sh
index 651e027c..595820cf 100755
--- a/tests/compose/core/02_forward_test.sh
+++ b/tests/compose/core/02_forward_test.sh
@@ -1,4 +1,4 @@
-cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
+cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: forwardinguser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
@@ -10,7 +10,7 @@ EOF
python3 tests/forward_test.py
-cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
+cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: forwardinguser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
diff --git a/tests/compose/core/03_alias_test.sh b/tests/compose/core/03_alias_test.sh
index 2d40903a..dce1918a 100755
--- a/tests/compose/core/03_alias_test.sh
+++ b/tests/compose/core/03_alias_test.sh
@@ -1,4 +1,4 @@
-cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
+cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
aliases:
- localpart: alltheusers
domain: mailu.io
@@ -7,6 +7,6 @@ EOF
python3 tests/alias_test.py
-cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
+cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
aliases: []
EOF
diff --git a/tests/compose/core/04_reply_test.sh b/tests/compose/core/04_reply_test.sh
index 7615a0f8..83c114f6 100755
--- a/tests/compose/core/04_reply_test.sh
+++ b/tests/compose/core/04_reply_test.sh
@@ -1,4 +1,4 @@
-cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
+cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: replyuser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
@@ -11,7 +11,7 @@ EOF
python3 tests/reply_test.py
-cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update --verbose
+cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
- localpart: replyuser
password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
diff --git a/towncrier/newsfragments/1604.feature b/towncrier/newsfragments/1604.feature
index 06ee0beb..2b47791a 100644
--- a/towncrier/newsfragments/1604.feature
+++ b/towncrier/newsfragments/1604.feature
@@ -1 +1 @@
-Added cli command config-dump and enhanced config-update
+Add cli commands config-import and config-export
From 3937986e76f7079eb4488f12164f83f1966cb0ec Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 15 Feb 2021 10:01:35 +0100
Subject: [PATCH 056/181] Convert OrderedDict to dict for output
---
core/admin/mailu/manage.py | 16 ++++++++++++----
1 file changed, 12 insertions(+), 4 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index a8d1d3cb..37d91f33 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -7,7 +7,7 @@ import socket
import logging
import uuid
-from collections import Counter
+from collections import Counter, OrderedDict
from itertools import chain
import click
@@ -396,11 +396,19 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
return chain(message, changes)
def log(action, target, message=None):
+
+ def od2d(val):
+ """ converts OrderedDicts to Dict for logging purposes """
+ if isinstance(val, OrderedDict):
+ return {k: od2d(v) for k, v in val.items()}
+ elif isinstance(val, list):
+ return [od2d(v) for v in val]
+ else:
+ return val
+
if message is None:
- # TODO: convert nested OrderedDict to dict
- # see: flask mailu config-import -nvv yaml/dump4.yaml
try:
- message = dict(logger[target.__class__].dump(target))
+ message = od2d(logger[target.__class__].dump(target))
except KeyError:
message = target
if not isinstance(message, str):
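
As an aside on why this conversion helps (a standalone sketch, not part of the patch): nested OrderedDict instances produce noisy repr output in the import log, while plain dicts print compactly:

    from collections import OrderedDict

    dumped = OrderedDict(email='foo@example.com',
                         fetches=[OrderedDict(id=1, host='imap.example.com')])
    print(repr(dumped))   # OrderedDict([('email', ...), ('fetches', [OrderedDict([...])])])

    def od2d(val):
        """ recursively convert OrderedDicts to plain dicts """
        if isinstance(val, OrderedDict):
            return {k: od2d(v) for k, v in val.items()}
        if isinstance(val, list):
            return [od2d(v) for v in val]
        return val

    print(repr(od2d(dumped)))
    # {'email': 'foo@example.com', 'fetches': [{'id': 1, 'host': 'imap.example.com'}]}
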
From 8929912dea9c061632a3bde53726e0949ad455b9 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 15 Feb 2021 21:56:58 +0100
Subject: [PATCH 057/181] remove OrderedDict - not necessary in python>=3.7
---
core/admin/mailu/manage.py | 24 ++++++------------
core/admin/mailu/schemas.py | 50 +++++++++++++------------------------
2 files changed, 25 insertions(+), 49 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index 37d91f33..a20c7d6d 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -7,7 +7,7 @@ import socket
import logging
import uuid
-from collections import Counter, OrderedDict
+from collections import Counter
from itertools import chain
import click
@@ -397,18 +397,9 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
def log(action, target, message=None):
- def od2d(val):
- """ converts OrderedDicts to Dict for logging purposes """
- if isinstance(val, OrderedDict):
- return {k: od2d(v) for k, v in val.items()}
- elif isinstance(val, list):
- return [od2d(v) for v in val]
- else:
- return val
-
if message is None:
try:
- message = od2d(logger[target.__class__].dump(target))
+ message = logger[target.__class__].dump(target)
except KeyError:
message = target
if not isinstance(message, str):
@@ -536,12 +527,11 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
except Exception as exc:
if verbose >= 5:
raise
- else:
- # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
- raise click.ClickException(
- f'[{exc.__class__.__name__}] '
- f'{" ".join(str(exc).split())}'
- ) from exc
+ # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
+ raise click.ClickException(
+ f'[{exc.__class__.__name__}] '
+ f'{" ".join(str(exc).split())}'
+ ) from exc
# flush session to show/count all changes
if dry_run or verbose >= 1:
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 54a2e928..8e91b4aa 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -2,7 +2,6 @@
"""
from copy import deepcopy
-from collections import OrderedDict
from textwrap import wrap
import re
@@ -155,11 +154,7 @@ def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
### render modules ###
-# allow yaml module to dump OrderedDict
-yaml.add_representer(
- OrderedDict,
- lambda cls, data: cls.represent_mapping('tag:yaml.org,2002:map', data.items())
-)
+# allow yaml to represent hidden attributes
yaml.add_representer(
_Hidden,
lambda cls, data: cls.represent_data(str(data))
@@ -410,8 +405,6 @@ class BaseOpts(SQLAlchemyAutoSchemaOpts):
def __init__(self, meta, ordered=False):
if not hasattr(meta, 'sqla_session'):
meta.sqla_session = models.db.session
- if not hasattr(meta, 'ordered'):
- meta.ordered = True
if not hasattr(meta, 'sibling'):
meta.sibling = False
super(BaseOpts, self).__init__(meta, ordered=ordered)
@@ -474,19 +467,23 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
self._hide_by_context |= set(what)
# remember primary keys
- self._primary = self.opts.model.__table__.primary_key.columns.values()[0].name
+ self._primary = str(self.opts.model.__table__.primary_key.columns.values()[0].name)
- # initialize attribute order
+ # determine attribute order
if hasattr(self.Meta, 'order'):
# use user-defined order
- self._order = list(reversed(self.Meta.order))
+ order = self.Meta.order
else:
# default order is: primary_key + other keys alphabetically
- self._order = list(sorted(self.fields.keys()))
- if self._primary in self._order:
- self._order.remove(self._primary)
- self._order.reverse()
- self._order.append(self._primary)
+ order = list(sorted(self.fields.keys()))
+ if self._primary in order:
+ order.remove(self._primary)
+ order.insert(0, self._primary)
+
+ # order dump_fields
+ for field in order:
+ if field in self.dump_fields:
+ self.dump_fields[field] = self.dump_fields.pop(field)
# move pre_load hook "_track_import" to the front
hooks = self._hooks[('pre_load', False)]
@@ -704,16 +701,9 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
return item
@post_dump
- def _hide_and_order(self, data, many, **kwargs): # pylint: disable=unused-argument
+ def _hide_values(self, data, many, **kwargs): # pylint: disable=unused-argument
""" hide secrets and order output """
- # order output
- for key in self._order:
- try:
- data.move_to_end(key, False)
- except KeyError:
- pass
-
# stop early when not excluding/hiding
if not self._exclude_by_value and not self._hide_by_context:
return data
@@ -870,18 +860,14 @@ class MailuSchema(Schema):
""" Schema config """
render_module = RenderYAML
- ordered = True
order = ['domain', 'user', 'alias', 'relay'] # 'config'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- # order fields
- for field_list in self.load_fields, self.dump_fields, self.fields:
- for section in reversed(self.Meta.order):
- try:
- field_list.move_to_end(section, False)
- except KeyError:
- pass
+ # order dump_fields
+ for field in self.Meta.order:
+ if field in self.dump_fields:
+ self.dump_fields[field] = self.dump_fields.pop(field)
def _call_and_store(self, *args, **kwargs):
""" track current parent and field for pruning """
From 70a1c79f81d4ceecba42e541947b14a2e9980bff Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 15 Feb 2021 22:57:37 +0100
Subject: [PATCH 058/181] handle prune and delete for lists and backrefs
---
core/admin/mailu/manage.py | 13 ++++++++---
core/admin/mailu/schemas.py | 43 ++++++++++++++++++++++++++++++++++---
2 files changed, 50 insertions(+), 6 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index a20c7d6d..05eae010 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -478,9 +478,16 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
if verbose >= 1:
log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
- def track_serialize(obj, item):
+ def track_serialize(obj, item, backref=None):
""" callback function to track import """
- # hide secrets
+ # called for backref modification?
+ if backref is not None:
+ log('Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format(**backref))
+ return
+ # verbose?
+ if not verbose >= 2:
+ return
+ # hide secrets in data
data = logger[obj.opts.model].hide(item)
if 'hash_password' in data:
data['password'] = HIDDEN
@@ -501,7 +508,7 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
'import': True,
'update': update,
'clear': not update,
- 'callback': track_serialize if verbose >= 2 else None,
+ 'callback': track_serialize,
}
# register listeners
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 8e91b4aa..3e15ee26 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -14,6 +14,7 @@ from marshmallow import pre_load, post_load, post_dump, fields, Schema
from marshmallow.utils import ensure_text_type
from marshmallow.exceptions import ValidationError
from marshmallow_sqlalchemy import SQLAlchemyAutoSchemaOpts
+from marshmallow_sqlalchemy.fields import RelatedList
from flask_marshmallow import Marshmallow
@@ -39,8 +40,6 @@ ma = Marshmallow()
# - when modifying, nothing is required (only the primary key, but this key is in the uri)
# - the primary key from post data must not differ from the key in the uri
# - when creating all fields without default or auto-increment are required
-# TODO: what about deleting list items and prung lists?
-# - domain.alternatives, user.forward_destination, user.manager_of, aliases.destination
# TODO: validate everything!
@@ -652,7 +651,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if '__delete__' in data:
# deletion of non-existent item requested
raise ValidationError(
- f'item not found: {data[self._primary]!r}',
+ f'item to delete not found: {data[self._primary]!r}',
field_name=f'?.{self._primary}',
)
@@ -665,6 +664,44 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# delete instance when marked
if '__delete__' in data:
self.opts.sqla_session.delete(instance)
+ # delete item from lists or prune lists
+ # currently: domain.alternatives, user.forward_destination,
+ # user.manager_of, aliases.destination
+ for key, value in data.items():
+ if isinstance(value, list):
+ new_value = set(value)
+ # handle list pruning
+ if '-prune-' in value:
+ value.remove('-prune-')
+ new_value.remove('-prune-')
+ else:
+ for old in getattr(instance, key):
+ # using str() is okay for now (see above)
+ new_value.add(str(old))
+ # handle item deletion
+ for item in value:
+ if item.startswith('-'):
+ new_value.remove(item)
+ try:
+ new_value.remove(item[1:])
+ except KeyError as exc:
+ raise ValidationError(
+ f'item to delete not found: {item[1:]!r}',
+ field_name=f'?.{key}',
+ ) from exc
+ # deduplicate and sort list
+ data[key] = sorted(new_value)
+ # log backref modification not caught by hook
+ if isinstance(self.fields[key], RelatedList):
+ if callback := self.context.get('callback'):
+ callback(self, instance, {
+ 'key': key,
+ 'target': str(instance),
+ 'before': [str(v) for v in getattr(instance, key)],
+ 'after': data[key],
+ })
+
+
# add attributes required for validation from db
# TODO: this will cause validation errors if value from database does not validate
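
A standalone, simplified sketch of the list-merge rule introduced above (string items only, error handling left out): without '-prune-' the incoming list is merged with the current values, '-prune-' drops everything not listed, and items prefixed with '-' are deleted:

    def merge_list(current, incoming):
        incoming = list(incoming)
        merged = set(incoming)
        if '-prune-' in incoming:
            # keep only what the input lists explicitly
            incoming.remove('-prune-')
            merged.remove('-prune-')
        else:
            # otherwise start from the existing values
            merged.update(str(item) for item in current)
        for item in incoming:
            if item.startswith('-'):
                # a leading '-' marks an item for deletion
                merged.discard(item)
                merged.discard(item[1:])
        return sorted(merged)

    print(merge_list(['a@x.net', 'b@x.net'], ['-a@x.net', 'c@x.net']))   # ['b@x.net', 'c@x.net']
    print(merge_list(['a@x.net', 'b@x.net'], ['-prune-', 'c@x.net']))    # ['c@x.net']
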
From 1e2b5f26ab9fed0db8994ca54db7f57ce0792ce8 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 16 Feb 2021 13:34:02 +0100
Subject: [PATCH 059/181] don't handle nested lists
---
core/admin/mailu/schemas.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 3e15ee26..b9b8e393 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -668,7 +668,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# currently: domain.alternatives, user.forward_destination,
# user.manager_of, aliases.destination
for key, value in data.items():
- if isinstance(value, list):
+ if not isinstance(self.fields[key], fields.Nested) and isinstance(value, list):
new_value = set(value)
# handle list pruning
if '-prune-' in value:
From 10435114ec0206e3558734634d906cffbd49e783 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 16 Feb 2021 15:36:01 +0100
Subject: [PATCH 060/181] updated remarks and docs
---
core/admin/mailu/manage.py | 25 ++++++++++++---------
core/admin/mailu/models.py | 12 +++++-----
core/admin/mailu/schemas.py | 3 ++-
docs/cli.rst | 45 ++++++++++++++++++++++++-------------
4 files changed, 51 insertions(+), 34 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index 05eae010..756400ad 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -337,16 +337,19 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
"""
# verbose
- # 0 : show number of changes
- # 1 : also show changes
- # 2 : also show secrets
- # 3 : also show input data
- # 4 : also show sql queries
- # 5 : also show tracebacks
+ # 0 : only show number of changes
+ # 1 : also show detailed changes
+ # 2 : also show input data
+ # 3 : also show sql queries (also needs -s, as sql may contain secrets)
+ # 4 : also show tracebacks (also needs -s, as tracebacks may contain secrets)
if quiet:
verbose = -1
+ if verbose > 2 and not secrets:
+ print('[Warning] Verbosity level capped to 2. Specify --secrets to log sql and tracebacks.')
+ verbose = 2
+
color_cfg = {
'color': color or sys.stdout.isatty(),
'lexer': 'python',
@@ -376,7 +379,7 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
fmt = f' - {{:<{max([len(loc) for loc, msg in res])}}} : {{}}'
res = [fmt.format(loc, msg) for loc, msg in res]
num = f'error{["s",""][len(res)==1]}'
- res.insert(0, f'[ValidationError] {len(res)} {num} occured during input validation')
+ res.insert(0, f'[ValidationError] {len(res)} {num} occurred during input validation')
return '\n'.join(res)
@@ -484,7 +487,7 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
if backref is not None:
log('Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format(**backref))
return
- # verbose?
+ # show input data?
if not verbose >= 2:
return
# hide secrets in data
@@ -532,7 +535,7 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
except ValidationError as exc:
raise click.ClickException(format_errors(exc.messages)) from exc
except Exception as exc:
- if verbose >= 5:
+ if verbose >= 3:
raise
# (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
raise click.ClickException(
@@ -584,7 +587,7 @@ def config_export(full=False, secrets=False, color=False, dns=False, output=None
if only:
for spec in only:
if spec.split('.', 1)[0] not in MailuSchema.Meta.order:
- raise click.ClickException(f'[ERROR] Unknown section: {spec}')
+ raise click.ClickException(f'[ValidationError] Unknown section: {spec}')
else:
only = MailuSchema.Meta.order
@@ -606,7 +609,7 @@ def config_export(full=False, secrets=False, color=False, dns=False, output=None
print(colorize(schema.dumps(models.MailuConfig()), **color_cfg), file=output)
except ValueError as exc:
if spec := get_fieldspec(exc):
- raise click.ClickException(f'[ERROR] Invalid filter: {spec}') from exc
+ raise click.ClickException(f'[ValidationError] Invalid filter: {spec}') from exc
raise
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index 5799e282..4c119984 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -33,7 +33,7 @@ class IdnaDomain(db.TypeDecorator):
""" Stores a Unicode string in it's IDNA representation (ASCII only)
"""
- # TODO: String(80) is too small?
+ # TODO: use db.String(255)?
impl = db.String(80)
def process_bind_param(self, value, dialect):
@@ -50,7 +50,7 @@ class IdnaEmail(db.TypeDecorator):
""" Stores a Unicode string in it's IDNA representation (ASCII only)
"""
- # TODO: String(255) is too small?
+ # TODO: use db.String(254)?
impl = db.String(255)
def process_bind_param(self, value, dialect):
@@ -314,7 +314,7 @@ class Relay(Base):
__tablename__ = 'relay'
name = db.Column(IdnaDomain, primary_key=True, nullable=False)
- # TODO: String(80) is too small?
+ # TODO: use db.String(266)? transport(8):(1)[nexthop(255)](2)
smtp = db.Column(db.String(80), nullable=True)
@@ -322,9 +322,7 @@ class Email(object):
""" Abstraction for an email address (localpart and domain).
"""
- # TODO: validate max. total length of address (<=254)
-
- # TODO: String(80) is too large (64)?
+ # TODO: use db.String(64)?
localpart = db.Column(db.String(80), nullable=False)
@declarative.declared_attr
@@ -634,7 +632,7 @@ class Token(Base):
user = db.relationship(User,
backref=db.backref('tokens', cascade='all, delete-orphan'))
password = db.Column(db.String(255), nullable=False)
- # TODO: String(255) is too large? (43 should be sufficient)
+ # TODO: use db.String(32)?
ip = db.Column(db.String(255))
def check_password(self, password):
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index b9b8e393..7d0393f0 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -36,7 +36,7 @@ from . import models, dkim
ma = Marshmallow()
-# TODO: how and where to mark keys as "required" while unserializing in api?
+# TODO: how and where to mark keys as "required" while deserializing in api?
# - when modifying, nothing is required (only the primary key, but this key is in the uri)
# - the primary key from post data must not differ from the key in the uri
# - when creating all fields without default or auto-increment are required
@@ -705,6 +705,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# add attributes required for validation from db
# TODO: this will cause validation errors if value from database does not validate
+ # but there should not be an invalid value in the database
for attr_name, field_obj in self.load_fields.items():
if field_obj.required and attr_name not in data:
data[attr_name] = getattr(instance, attr_name)
diff --git a/docs/cli.rst b/docs/cli.rst
index 497cdfc5..6d48c576 100644
--- a/docs/cli.rst
+++ b/docs/cli.rst
@@ -97,7 +97,7 @@ where mail-config.yml looks like:
Without the ``--delete-objects`` option config-update will only add/update new values but will *not* remove any entries missing from the provided YAML input.
Users
------
+^^^^^
following are additional parameters that could be defined for users:
@@ -116,7 +116,7 @@ following are additional parameters that could be defined for users:
* spam_threshold
Alias
------
+^^^^^
additional fields:
@@ -125,11 +125,11 @@ additional fields:
config-export
-------------
-The purpose of this command is to export domain-, relay-, alias- and user-configuration in YAML or JSON format.
+The purpose of this command is to export the complete configuration in YAML or JSON format.
.. code-block:: bash
- # docker-compose exec admin flask mailu config-export --help
+ $ docker-compose exec admin flask mailu config-export --help
Usage: flask mailu config-export [OPTIONS] [FILTER]...
@@ -152,18 +152,18 @@ filters to export only some objects or attributes (try: ``user`` or ``domain.nam
.. code-block:: bash
- docker-compose exec admin flask mailu config-export -o mail-config.yml
+ $ docker-compose exec admin flask mailu config-export -o mail-config.yml
- docker-compose exec admin flask mailu config-export --dns domain.dns_mx domain.dns_spf
+ $ docker-compose exec admin flask mailu config-export --dns domain.dns_mx domain.dns_spf
config-import
-------------
-The purpose of this command is to import domain-, relay-, alias- and user-configuration in bulk and synchronize DB entries with an external YAML/JSON source.
+This command imports configuration data from an external YAML or JSON source.
.. code-block:: bash
- # docker-compose exec admin flask mailu config-import --help
+ $ docker-compose exec admin flask mailu config-import --help
Usage: flask mailu config-import [OPTIONS] [FILENAME|-]
@@ -211,13 +211,28 @@ mail-config.yml contains the configuration and looks like this:
config-import shows the number of created/modified/deleted objects after import.
To suppress all messages except error messages use ``--quiet``.
-By adding the ``--verbose`` switch (one or more times) the import gets more detailed and shows exactly what attributes changed.
-In all messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to show secrets.
-If you want to test what would be done when importing use ``--dry-run``.
-By default config-import replaces the whole configuration. You can use ``--update`` to change the existing configuration instead.
-When updating you can add new and change existing objects.
-To delete an object use ``-key: value`` (To delete the domain example.com ``-name: example.com`` for example).
-To reset an attribute to default use ``-key: null`` (To reset enable_imap ``-enable_imap: null`` for example).
+By adding the ``--verbose`` switch (up to two times) the import gets more detailed and shows exactly what attributes changed.
+In all log messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to log secrets.
+If you want to test what would be done when importing without committing any changes, use ``--dry-run``.
+
+By default config-import replaces the whole configuration. ``--update`` lets you modify the existing configuration instead.
+New elements will be added and existing elements will be modified.
+It is possible to delete a single element or prune all elements from lists and associative arrays using a special notation:
+
++-----------------------------+------------------+--------------------------+
+| Delete what? | notation | example |
++=============================+==================+==========================+
+| specific array object | ``- -key: id`` | ``- -name: example.com`` |
++-----------------------------+------------------+--------------------------+
+| specific list item | ``- -id`` | ``- -user1@example.com`` |
++-----------------------------+------------------+--------------------------+
+| all remaining array objects | ``- -key: null`` | ``- -email: null`` |
++-----------------------------+------------------+--------------------------+
+| all remaining list items | ``- -prune-`` | ``- -prune-`` |
++-----------------------------+------------------+--------------------------+
+
+The ``-key: null`` notation can also be used to reset an attribute to its default.
+To reset *spam_threshold* to its default *80* use ``-spam_threshold: null``.
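
As an illustration (not part of the patch), a minimal PyYAML sketch of an ``--update`` document using this notation; the addresses are made up, and config-import is what interprets the ``-`` markers:

.. code-block:: python

    import yaml

    # hypothetical --update input: drop one forward destination and
    # prune all alternatives of a domain that are not listed
    update_doc = """
    user:
      - email: foo@example.com
        forward_destination:
          - -old@example.com      # '- -id' deletes this single list item
          - new@example.com
    domain:
      - name: example.com
        alternatives:
          - -prune-               # '- -prune-' drops all remaining items
          - alt.example.com
    """
    print(yaml.safe_load(update_doc))
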
This is a complete YAML template with all additional parameters that can be defined:
From e4c83e162dfa37189b8eac506b07ec6f78a5b5d0 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 16 Feb 2021 17:59:43 +0100
Subject: [PATCH 061/181] fixed colorize auto detection
---
core/admin/mailu/manage.py | 6 +++---
core/admin/mailu/schemas.py | 8 ++++----
2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index 756400ad..bef49faa 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -19,7 +19,7 @@ from flask.cli import FlaskGroup, with_appcontext
from marshmallow.exceptions import ValidationError
from . import models
-from .schemas import MailuSchema, get_schema, get_fieldspec, colorize, RenderJSON, HIDDEN
+from .schemas import MailuSchema, get_schema, get_fieldspec, colorize, canColorize, RenderJSON, HIDDEN
db = models.db
@@ -351,7 +351,7 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
verbose = 2
color_cfg = {
- 'color': color or sys.stdout.isatty(),
+ 'color': color or (canColorize and sys.stdout.isatty()),
'lexer': 'python',
'strip': True,
}
@@ -598,7 +598,7 @@ def config_export(full=False, secrets=False, color=False, dns=False, output=None
}
schema = MailuSchema(only=only, context=context)
- color_cfg = {'color': color or output.isatty()}
+ color_cfg = {'color': color or (canColorize and output.isatty())}
if as_json:
schema.opts.render_module = RenderJSON
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 7d0393f0..6a8303c5 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -27,9 +27,9 @@ try:
from pygments.lexers.data import YamlLexer
from pygments.formatters import get_formatter_by_name
except ModuleNotFoundError:
- COLOR_SUPPORTED = False
+ canColorize = False
else:
- COLOR_SUPPORTED = True
+ canColorize = True
from . import models, dkim
@@ -99,11 +99,11 @@ def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
""" add ANSI color to data """
if color is None:
# autodetect colorize
- color = COLOR_SUPPORTED
+ color = canColorize
if not color:
# no color wanted
return data
- if not COLOR_SUPPORTED:
+ if not canColorize:
# want color, but not supported
raise ValueError('Please install pygments to colorize output')
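
The detection rule is simply "force flag, or pygments available and the stream is a terminal". A standalone sketch of the same logic, assuming pygments may or may not be installed:

    import sys

    try:
        import pygments  # noqa: F401
        can_colorize = True
    except ModuleNotFoundError:
        can_colorize = False

    def want_color(force=False, stream=sys.stdout):
        """ colorize when forced, or when supported and writing to a terminal """
        return force or (can_colorize and stream.isatty())

    print(want_color())   # False when piped or pygments is missing, True on a terminal
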
From bde7a2b6c4a8a2351b461cee9be413d7683e95dd Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Fri, 19 Feb 2021 18:01:02 +0100
Subject: [PATCH 062/181] moved import logging to schema
- yaml-import is now logged via schema.Logger
- removed relative imports - not used in other mailu modules
- removed development comments
- added MailuConfig.check method to check for duplicate domain names
- converted .format() to .format_map() where possible
- switched to yaml multiline dump for dkim_key
- converted dkim_key import from regex to string functions
- automatically unhide/unexclude explicitly specified attributes on dump
- use field order when loading to stabilize import
- fail when using 'hash_password' without 'password'
- fixed logging of dkim_key
- fixed pruning and deleting of lists
- modified error messages
- added debug flag and two verbosity levels
---
core/admin/mailu/manage.py | 280 ++---------
core/admin/mailu/models.py | 40 +-
core/admin/mailu/schemas.py | 922 ++++++++++++++++++++++++------------
docs/cli.rst | 15 +-
4 files changed, 688 insertions(+), 569 deletions(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index bef49faa..f9add0f4 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -4,22 +4,16 @@
import sys
import os
import socket
-import logging
import uuid
-from collections import Counter
-from itertools import chain
-
import click
-import sqlalchemy
import yaml
from flask import current_app as app
from flask.cli import FlaskGroup, with_appcontext
-from marshmallow.exceptions import ValidationError
-from . import models
-from .schemas import MailuSchema, get_schema, get_fieldspec, colorize, canColorize, RenderJSON, HIDDEN
+from mailu import models
+from mailu.schemas import MailuSchema, Logger, RenderJSON
db = models.db
@@ -326,246 +320,53 @@ def config_update(verbose=False, delete_objects=False):
@mailu.command()
@click.option('-v', '--verbose', count=True, help='Increase verbosity.')
@click.option('-s', '--secrets', is_flag=True, help='Show secret attributes in messages.')
+@click.option('-d', '--debug', is_flag=True, help='Enable debug output.')
@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors.')
@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config.')
@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made.')
@click.argument('source', metavar='[FILENAME|-]', type=click.File(mode='r'), default=sys.stdin)
@with_appcontext
-def config_import(verbose=0, secrets=False, quiet=False, color=False, update=False, dry_run=False, source=None):
+def config_import(verbose=0, secrets=False, debug=False, quiet=False, color=False,
+ update=False, dry_run=False, source=None):
""" Import configuration as YAML or JSON from stdin or file
"""
- # verbose
- # 0 : only show number of changes
- # 1 : also show detailed changes
- # 2 : also show input data
- # 3 : also show sql queries (also needs -s, as sql may contain secrets)
- # 4 : also show tracebacks (also needs -s, as tracebacks may contain secrets)
+ log = Logger(want_color=color or None, can_color=sys.stdout.isatty(), secrets=secrets, debug=debug)
+ log.lexer = 'python'
+ log.strip = True
+ log.verbose = 0 if quiet else verbose
+ log.quiet = quiet
- if quiet:
- verbose = -1
-
- if verbose > 2 and not secrets:
- print('[Warning] Verbosity level capped to 2. Specify --secrets to log sql and tracebacks.')
- verbose = 2
-
- color_cfg = {
- 'color': color or (canColorize and sys.stdout.isatty()),
- 'lexer': 'python',
- 'strip': True,
- }
-
- counter = Counter()
- logger = {}
-
- def format_errors(store, path=None):
-
- res = []
- if path is None:
- path = []
- for key in sorted(store):
- location = path + [str(key)]
- value = store[key]
- if isinstance(value, dict):
- res.extend(format_errors(value, location))
- else:
- for message in value:
- res.append((".".join(location), message))
-
- if path:
- return res
-
- fmt = f' - {{:<{max([len(loc) for loc, msg in res])}}} : {{}}'
- res = [fmt.format(loc, msg) for loc, msg in res]
- num = f'error{["s",""][len(res)==1]}'
- res.insert(0, f'[ValidationError] {len(res)} {num} occurred during input validation')
-
- return '\n'.join(res)
-
- def format_changes(*message):
- if counter:
- changes = []
- last = None
- for (action, what), count in sorted(counter.items()):
- if action != last:
- if last:
- changes.append('/')
- changes.append(f'{action}:')
- last = action
- changes.append(f'{what}({count})')
- else:
- changes = ['No changes.']
- return chain(message, changes)
-
- def log(action, target, message=None):
-
- if message is None:
- try:
- message = logger[target.__class__].dump(target)
- except KeyError:
- message = target
- if not isinstance(message, str):
- message = repr(message)
- print(f'{action} {target.__table__}: {colorize(message, **color_cfg)}')
-
- def listen_insert(mapper, connection, target): # pylint: disable=unused-argument
- """ callback function to track import """
- counter.update([('Created', target.__table__.name)])
- if verbose >= 1:
- log('Created', target)
-
- def listen_update(mapper, connection, target): # pylint: disable=unused-argument
- """ callback function to track import """
-
- changed = {}
- inspection = sqlalchemy.inspect(target)
- for attr in sqlalchemy.orm.class_mapper(target.__class__).column_attrs:
- history = getattr(inspection.attrs, attr.key).history
- if history.has_changes() and history.deleted:
- before = history.deleted[-1]
- after = getattr(target, attr.key)
- # TODO: remove special handling of "comment" after modifying model
- if attr.key == 'comment' and not before and not after:
- pass
- # only remember changed keys
- elif before != after:
- if verbose >= 1:
- changed[str(attr.key)] = (before, after)
- else:
- break
-
- if verbose >= 1:
- # use schema with dump_context to hide secrets and sort keys
- dumped = get_schema(target)(only=changed.keys(), context=diff_context).dump(target)
- for key, value in dumped.items():
- before, after = changed[key]
- if value == HIDDEN:
- before = HIDDEN if before else before
- after = HIDDEN if after else after
- else:
- # TODO: need to use schema to "convert" before value?
- after = value
- log('Modified', target, f'{str(target)!r} {key}: {before!r} -> {after!r}')
-
- if changed:
- counter.update([('Modified', target.__table__.name)])
-
- def listen_delete(mapper, connection, target): # pylint: disable=unused-argument
- """ callback function to track import """
- counter.update([('Deleted', target.__table__.name)])
- if verbose >= 1:
- log('Deleted', target)
-
- # TODO: this listener will not be necessary, if dkim keys would be stored in database
- _dedupe_dkim = set()
- def listen_dkim(session, flush_context): # pylint: disable=unused-argument
- """ callback function to track import """
- for target in session.identity_map.values():
- # look at Domains originally loaded from db
- if not isinstance(target, models.Domain) or not target._sa_instance_state.load_path:
- continue
- before = target._dkim_key_on_disk
- after = target._dkim_key
- if before != after:
- if secrets:
- before = before.decode('ascii', 'ignore')
- after = after.decode('ascii', 'ignore')
- else:
- before = HIDDEN if before else ''
- after = HIDDEN if after else ''
- # "de-dupe" messages; this event is fired at every flush
- if not (target, before, after) in _dedupe_dkim:
- _dedupe_dkim.add((target, before, after))
- counter.update([('Modified', target.__table__.name)])
- if verbose >= 1:
- log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
-
- def track_serialize(obj, item, backref=None):
- """ callback function to track import """
- # called for backref modification?
- if backref is not None:
- log('Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format(**backref))
- return
- # show input data?
- if not verbose >= 2:
- return
- # hide secrets in data
- data = logger[obj.opts.model].hide(item)
- if 'hash_password' in data:
- data['password'] = HIDDEN
- if 'fetches' in data:
- for fetch in data['fetches']:
- fetch['password'] = HIDDEN
- log('Handling', obj.opts.model, data)
-
- # configure contexts
- diff_context = {
- 'full': True,
- 'secrets': secrets,
- }
- log_context = {
- 'secrets': secrets,
- }
- load_context = {
+ context = {
'import': True,
'update': update,
'clear': not update,
- 'callback': track_serialize,
+ 'callback': log.track_serialize,
}
- # register listeners
- for schema in get_schema():
- model = schema.Meta.model
- logger[model] = schema(context=log_context)
- sqlalchemy.event.listen(model, 'after_insert', listen_insert)
- sqlalchemy.event.listen(model, 'after_update', listen_update)
- sqlalchemy.event.listen(model, 'after_delete', listen_delete)
-
- # special listener for dkim_key changes
- sqlalchemy.event.listen(db.session, 'after_flush', listen_dkim)
-
- if verbose >= 3:
- logging.basicConfig()
- logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+ schema = MailuSchema(only=MailuSchema.Meta.order, context=context)
try:
+ # import source
with models.db.session.no_autoflush:
- config = MailuSchema(only=MailuSchema.Meta.order, context=load_context).loads(source)
- except ValidationError as exc:
- raise click.ClickException(format_errors(exc.messages)) from exc
+ config = schema.loads(source)
+ # flush session to show/count all changes
+ if not quiet and (dry_run or verbose):
+ db.session.flush()
+ # check for duplicate domain names
+ config.check()
except Exception as exc:
- if verbose >= 3:
- raise
- # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
- raise click.ClickException(
- f'[{exc.__class__.__name__}] '
- f'{" ".join(str(exc).split())}'
- ) from exc
-
- # flush session to show/count all changes
- if dry_run or verbose >= 1:
- db.session.flush()
-
- # check for duplicate domain names
- dup = set()
- for fqdn in chain(
- db.session.query(models.Domain.name),
- db.session.query(models.Alternative.name),
- db.session.query(models.Relay.name)
- ):
- if fqdn in dup:
- raise click.ClickException(f'[ValidationError] Duplicate domain name: {fqdn}')
- dup.add(fqdn)
+ if msg := log.format_exception(exc):
+ raise click.ClickException(msg) from exc
+ raise
# don't commit when running dry
if dry_run:
- if not quiet:
- print(*format_changes('Dry run. Not commiting changes.'))
+ log.changes('Dry run. Not committing changes.')
db.session.rollback()
else:
- if not quiet:
- print(*format_changes('Committing changes.'))
+ log.changes('Committing changes.')
db.session.commit()
@@ -573,8 +374,8 @@ def config_import(verbose=0, secrets=False, quiet=False, color=False, update=Fal
@click.option('-f', '--full', is_flag=True, help='Include attributes with default value.')
@click.option('-s', '--secrets', is_flag=True,
help='Include secret attributes (dkim-key, passwords).')
-@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
@click.option('-d', '--dns', is_flag=True, help='Include dns records.')
+@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
@click.option('-o', '--output-file', 'output', default=sys.stdout, type=click.File(mode='w'),
help='Save configuration to file.')
@click.option('-j', '--json', 'as_json', is_flag=True, help='Export configuration in json format.')
@@ -584,32 +385,25 @@ def config_export(full=False, secrets=False, color=False, dns=False, output=None
""" Export configuration as YAML or JSON to stdout or file
"""
- if only:
- for spec in only:
- if spec.split('.', 1)[0] not in MailuSchema.Meta.order:
- raise click.ClickException(f'[ValidationError] Unknown section: {spec}')
- else:
- only = MailuSchema.Meta.order
+ only = only or MailuSchema.Meta.order
context = {
'full': full,
'secrets': secrets,
'dns': dns,
}
-
- schema = MailuSchema(only=only, context=context)
- color_cfg = {'color': color or (canColorize and output.isatty())}
-
- if as_json:
- schema.opts.render_module = RenderJSON
- color_cfg['lexer'] = 'json'
- color_cfg['strip'] = True
+ log = Logger(want_color=color or None, can_color=output.isatty())
try:
- print(colorize(schema.dumps(models.MailuConfig()), **color_cfg), file=output)
- except ValueError as exc:
- if spec := get_fieldspec(exc):
- raise click.ClickException(f'[ValidationError] Invalid filter: {spec}') from exc
+ schema = MailuSchema(only=only, context=context)
+ if as_json:
+ schema.opts.render_module = RenderJSON
+ log.lexer = 'json'
+ log.strip = True
+ print(log.colorize(schema.dumps(models.MailuConfig())), file=output)
+ except Exception as exc:
+ if msg := log.format_exception(exc):
+ raise click.ClickException(msg) from exc
raise
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index 4c119984..1b3c787a 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -23,7 +23,7 @@ from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.inspection import inspect
from werkzeug.utils import cached_property
-from . import dkim
+from mailu import dkim
db = flask_sqlalchemy.SQLAlchemy()
@@ -33,7 +33,6 @@ class IdnaDomain(db.TypeDecorator):
""" Stores a Unicode string in it's IDNA representation (ASCII only)
"""
- # TODO: use db.String(255)?
impl = db.String(80)
def process_bind_param(self, value, dialect):
@@ -50,7 +49,6 @@ class IdnaEmail(db.TypeDecorator):
""" Stores a Unicode string in it's IDNA representation (ASCII only)
"""
- # TODO: use db.String(254)?
impl = db.String(255)
def process_bind_param(self, value, dialect):
@@ -127,11 +125,7 @@ class Base(db.Model):
if pkey == 'email':
# ugly hack for email declared attr. _email is not always up2date
return str(f'{self.localpart}@{self.domain_name}')
- elif pkey in {'name', 'email'}:
- return str(getattr(self, pkey, None))
- else:
- return self.__repr__()
- return str(getattr(self, self.__table__.primary_key.columns.values()[0].name))
+ return str(getattr(self, pkey))
def __repr__(self):
return f'<{self.__class__.__name__} {str(self)!r}>'
@@ -145,12 +139,15 @@ class Base(db.Model):
else:
return NotImplemented
+ # we need hashable instances here for sqlalchemy to update collections
+ # in collections.bulk_replace, but auto-incremented objects don't always have
+ # a valid primary key yet; in this case we use the object's id
+ __hashed = None
def __hash__(self):
- primary = getattr(self, self.__table__.primary_key.columns.values()[0].name)
- if primary is None:
- return NotImplemented
- else:
- return hash(primary)
+ if self.__hashed is None:
+ primary = getattr(self, self.__table__.primary_key.columns.values()[0].name)
+ self.__hashed = id(self) if primary is None else hash(primary)
+ return self.__hashed
# Many-to-many association table for domain managers
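
A standalone sketch (much simplified, not the actual model) of why the hash is cached: an object created without a primary key must keep the same hash after the key is assigned on flush, otherwise the collections SQLAlchemy rebuilds during bulk_replace would lose track of it:

    class Record:
        def __init__(self, pk=None):
            self.pk = pk
            self._hashed = None

        def __hash__(self):
            # hash by identity while the primary key is unset, then cache it
            if self._hashed is None:
                self._hashed = id(self) if self.pk is None else hash(self.pk)
            return self._hashed

    fresh = Record()        # not flushed yet, no primary key
    seen = {fresh}
    fresh.pk = 42           # key assigned later (e.g. by the database)
    print(fresh in seen)    # True - the cached hash did not change
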
@@ -314,7 +311,6 @@ class Relay(Base):
__tablename__ = 'relay'
name = db.Column(IdnaDomain, primary_key=True, nullable=False)
- # TODO: use db.String(266)? transport(8):(1)[nexthop(255)](2)
smtp = db.Column(db.String(80), nullable=True)
@@ -322,7 +318,6 @@ class Email(object):
""" Abstraction for an email address (localpart and domain).
"""
- # TODO: use db.String(64)?
localpart = db.Column(db.String(80), nullable=False)
@declarative.declared_attr
@@ -342,7 +337,7 @@ class Email(object):
key = f'{cls.__tablename__}_email'
if key in ctx.current_parameters:
return ctx.current_parameters[key]
- return '{localpart}@{domain_name}'.format(**ctx.current_parameters)
+ return '{localpart}@{domain_name}'.format_map(ctx.current_parameters)
return db.Column('email', IdnaEmail, primary_key=True, nullable=False, onupdate=updater)
@@ -632,7 +627,6 @@ class Token(Base):
user = db.relationship(User,
backref=db.backref('tokens', cascade='all, delete-orphan'))
password = db.Column(db.String(255), nullable=False)
- # TODO: use db.String(32)?
ip = db.Column(db.String(255))
def check_password(self, password):
@@ -865,6 +859,18 @@ class MailuConfig:
if models is None or model in models:
db.session.query(model).delete()
+ def check(self):
+ """ check for duplicate domain names """
+ dup = set()
+ for fqdn in chain(
+ db.session.query(Domain.name),
+ db.session.query(Alternative.name),
+ db.session.query(Relay.name)
+ ):
+ if fqdn in dup:
+ raise ValueError(f'Duplicate domain name: {fqdn}')
+ dup.add(fqdn)
+
domain = MailuCollection(Domain)
user = MailuCollection(User)
alias = MailuCollection(Alias)
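
For clarity, a standalone sketch of the uniqueness rule the new check method enforces (names are illustrative): a fully qualified domain name may not be reused across domains, alternatives and relays:

    from itertools import chain

    domains      = ['example.com']
    alternatives = ['alt.example.com']
    relays       = ['example.com']        # clashes with the domain above

    seen = set()
    try:
        for fqdn in chain(domains, alternatives, relays):
            if fqdn in seen:
                raise ValueError(f'Duplicate domain name: {fqdn}')
            seen.add(fqdn)
    except ValueError as exc:
        print(exc)    # Duplicate domain name: example.com
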
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 6a8303c5..4c5042ea 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -2,10 +2,10 @@
"""
from copy import deepcopy
-from textwrap import wrap
+from collections import Counter
-import re
import json
+import logging
import yaml
import sqlalchemy
@@ -27,24 +27,309 @@ try:
from pygments.lexers.data import YamlLexer
from pygments.formatters import get_formatter_by_name
except ModuleNotFoundError:
- canColorize = False
+ COLOR_SUPPORTED = False
else:
- canColorize = True
+ COLOR_SUPPORTED = True
-from . import models, dkim
+from mailu import models, dkim
ma = Marshmallow()
-# TODO: how and where to mark keys as "required" while deserializing in api?
-# - when modifying, nothing is required (only the primary key, but this key is in the uri)
-# - the primary key from post data must not differ from the key in the uri
-# - when creating all fields without default or auto-increment are required
-# TODO: validate everything!
+
+### import logging and schema colorization ###
+
+_model2schema = {}
+
+def get_schema(cls=None):
+ """ return schema class for model """
+ if cls is None:
+ return _model2schema.values()
+ return _model2schema.get(cls)
+
+def mapped(cls):
+ """ register schema in model2schema map """
+ _model2schema[cls.Meta.model] = cls
+ return cls
+
+class MyYamlLexer(YamlLexer):
+ """ colorize yaml constants and integers """
+ def get_tokens(self, text, unfiltered=False):
+ for typ, value in super().get_tokens(text, unfiltered):
+ if typ is Token.Literal.Scalar.Plain:
+ if value in {'true', 'false', 'null'}:
+ typ = Token.Keyword.Constant
+ elif value == HIDDEN:
+ typ = Token.Error
+ else:
+ try:
+ int(value, 10)
+ except ValueError:
+ try:
+ float(value)
+ except ValueError:
+ pass
+ else:
+ typ = Token.Literal.Number.Float
+ else:
+ typ = Token.Literal.Number.Integer
+ yield typ, value
+
+class Logger:
+
+ def __init__(self, want_color=None, can_color=False, debug=False, secrets=False):
+
+ self.lexer = 'yaml'
+ self.formatter = 'terminal'
+ self.strip = False
+ self.verbose = 0
+ self.quiet = False
+ self.secrets = secrets
+ self.debug = debug
+ self.print = print
+
+ if want_color and not COLOR_SUPPORTED:
+ raise ValueError('Please install pygments to colorize output')
+
+ self.color = want_color or (can_color and COLOR_SUPPORTED)
+
+ self._counter = Counter()
+ self._schemas = {}
+
+ # log contexts
+ self._diff_context = {
+ 'full': True,
+ 'secrets': secrets,
+ }
+ log_context = {
+ 'secrets': secrets,
+ }
+
+ # register listeners
+ for schema in get_schema():
+ model = schema.Meta.model
+ self._schemas[model] = schema(context=log_context)
+ sqlalchemy.event.listen(model, 'after_insert', self._listen_insert)
+ sqlalchemy.event.listen(model, 'after_update', self._listen_update)
+ sqlalchemy.event.listen(model, 'after_delete', self._listen_delete)
+
+ # special listener for dkim_key changes
+ # TODO: _listen_dkim can be removed when dkim keys are stored in database
+ self._dedupe_dkim = set()
+ sqlalchemy.event.listen(models.db.session, 'after_flush', self._listen_dkim)
+
+ # register debug logger for sqlalchemy
+ if self.debug:
+ logging.basicConfig()
+ logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+
+ def _log(self, action, target, message=None):
+ if message is None:
+ try:
+ message = self._schemas[target.__class__].dump(target)
+ except KeyError:
+ message = target
+ if not isinstance(message, str):
+ message = repr(message)
+ self.print(f'{action} {target.__table__}: {self.colorize(message)}')
+
+ def _listen_insert(self, mapper, connection, target): # pylint: disable=unused-argument
+ """ callback method to track import """
+ self._counter.update([('Created', target.__table__.name)])
+ if self.verbose:
+ self._log('Created', target)
+
+ def _listen_update(self, mapper, connection, target): # pylint: disable=unused-argument
+ """ callback method to track import """
+
+ changes = {}
+ inspection = sqlalchemy.inspect(target)
+ for attr in sqlalchemy.orm.class_mapper(target.__class__).column_attrs:
+ history = getattr(inspection.attrs, attr.key).history
+ if history.has_changes() and history.deleted:
+ before = history.deleted[-1]
+ after = getattr(target, attr.key)
+ # TODO: this can be removed when comment is not nullable in model
+ if attr.key == 'comment' and not before and not after:
+ pass
+ # only remember changed keys
+ elif before != after:
+ if self.verbose:
+ changes[str(attr.key)] = (before, after)
+ else:
+ break
+
+ if self.verbose:
+ # use schema to log changed attributes
+ schema = get_schema(target.__class__)
+ only = set(changes.keys()) & set(schema().fields.keys())
+ if only:
+ for key, value in schema(
+ only=only,
+ context=self._diff_context
+ ).dump(target).items():
+ before, after = changes[key]
+ if value == HIDDEN:
+ before = HIDDEN if before else before
+ after = HIDDEN if after else after
+ else:
+ # also hide this
+ after = value
+ self._log('Modified', target, f'{str(target)!r} {key}: {before!r} -> {after!r}')
+
+ if changes:
+ self._counter.update([('Modified', target.__table__.name)])
+
+ def _listen_delete(self, mapper, connection, target): # pylint: disable=unused-argument
+ """ callback method to track import """
+ self._counter.update([('Deleted', target.__table__.name)])
+ if self.verbose:
+ self._log('Deleted', target)
+
+ # TODO: _listen_dkim can be removed when dkim keys are stored in database
+ def _listen_dkim(self, session, flush_context): # pylint: disable=unused-argument
+ """ callback method to track import """
+ for target in session.identity_map.values():
+ # look at Domains originally loaded from db
+ if not isinstance(target, models.Domain) or not target._sa_instance_state.load_path:
+ continue
+ before = target._dkim_key_on_disk
+ after = target._dkim_key
+ if before != after:
+ # "de-dupe" messages; this event is fired at every flush
+ if not (target, before, after) in self._dedupe_dkim:
+ self._dedupe_dkim.add((target, before, after))
+ self._counter.update([('Modified', target.__table__.name)])
+ if self.verbose:
+ if self.secrets:
+ before = before.decode('ascii', 'ignore')
+ after = after.decode('ascii', 'ignore')
+ else:
+ before = HIDDEN if before else ''
+ after = HIDDEN if after else ''
+ self._log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
+
+ def track_serialize(self, obj, item, backref=None):
+ """ callback method to track import """
+ # called for backref modification?
+ if backref is not None:
+ self._log('Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format_map(backref))
+ return
+ # show input data?
+ if self.verbose < 2:
+ return
+ # hide secrets in data
+ if not self.secrets:
+ item = self._schemas[obj.opts.model].hide(item)
+ if 'hash_password' in item:
+ item['password'] = HIDDEN
+ if 'fetches' in item:
+ for fetch in item['fetches']:
+ fetch['password'] = HIDDEN
+ self._log('Handling', obj.opts.model, item)
+
+ def changes(self, *messages, **kwargs):
+ """ show changes gathered in counter """
+ if self.quiet:
+ return
+ if self._counter:
+ changes = []
+ last = None
+ for (action, what), count in sorted(self._counter.items()):
+ if action != last:
+ if last:
+ changes.append('/')
+ changes.append(f'{action}:')
+ last = action
+ changes.append(f'{what}({count})')
+ else:
+ changes = ['No changes.']
+ self.print(*messages, *changes, **kwargs)
+
+ def _format_errors(self, store, path=None):
+
+ res = []
+ if path is None:
+ path = []
+ for key in sorted(store):
+ location = path + [str(key)]
+ value = store[key]
+ if isinstance(value, dict):
+ res.extend(self._format_errors(value, location))
+ else:
+ for message in value:
+ res.append((".".join(location), message))
+
+ if path:
+ return res
+
+ maxlen = max([len(loc) for loc, msg in res])
+ res = [f' - {loc.ljust(maxlen)} : {msg}' for loc, msg in res]
+ errors = f'{len(res)} error{["s",""][len(res)==1]}'
+ res.insert(0, f'[ValidationError] {errors} occurred during input validation')
+
+ return '\n'.join(res)
+
+ def _is_validation_error(self, exc):
+ """ walk traceback to extract invalid field from marshmallow """
+ path = []
+ trace = exc.__traceback__
+ while trace:
+ if trace.tb_frame.f_code.co_name == '_serialize':
+ if 'attr' in trace.tb_frame.f_locals:
+ path.append(trace.tb_frame.f_locals['attr'])
+ elif trace.tb_frame.f_code.co_name == '_init_fields':
+ spec = ', '.join(['.'.join(path + [key]) for key in trace.tb_frame.f_locals['invalid_fields']])
+ return f'Invalid filter: {spec}'
+ trace = trace.tb_next
+ return None
+
+ def format_exception(self, exc):
+ """ format ValidationErrors and other exceptions when not debugging """
+ if isinstance(exc, ValidationError):
+ return self._format_errors(exc.messages)
+ if isinstance(exc, ValueError):
+ if msg := self._is_validation_error(exc):
+ return msg
+ if self.debug:
+ return None
+ msg = ' '.join(str(exc).split())
+ return f'[{exc.__class__.__name__}] {msg}'
+
+ colorscheme = {
+ Token: ('', ''),
+ Token.Name.Tag: ('cyan', 'cyan'),
+ Token.Literal.Scalar: ('green', 'green'),
+ Token.Literal.String: ('green', 'green'),
+ Token.Name.Constant: ('green', 'green'), # multiline strings
+ Token.Keyword.Constant: ('magenta', 'magenta'),
+ Token.Literal.Number: ('magenta', 'magenta'),
+ Token.Error: ('red', 'red'),
+ Token.Name: ('red', 'red'),
+ Token.Operator: ('red', 'red'),
+ }
+
+ def colorize(self, data, lexer=None, formatter=None, color=None, strip=None):
+ """ add ANSI color to data """
+
+ if color is False or not self.color:
+ return data
+
+ lexer = lexer or self.lexer
+ lexer = MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer)
+ formatter = get_formatter_by_name(formatter or self.formatter, colorscheme=self.colorscheme)
+ if strip is None:
+ strip = self.strip
+
+ res = highlight(data, lexer, formatter)
+ if strip:
+ return res.rstrip('\n')
+ return res
-### class for hidden values ###
+### marshmallow render modules ###
+# hidden attributes
class _Hidden:
def __bool__(self):
return False
@@ -58,107 +343,24 @@ class _Hidden:
return ''
__str__ = __repr__
-HIDDEN = _Hidden()
-
-
-### map model to schema ###
-
-_model2schema = {}
-
-def get_schema(model=None):
- """ return schema class for model or instance of model """
- if model is None:
- return _model2schema.values()
- else:
- return _model2schema.get(model) or _model2schema.get(model.__class__)
-
-def mapped(cls):
- """ register schema in model2schema map """
- _model2schema[cls.Meta.model] = cls
- return cls
-
-
-### helper functions ###
-
-def get_fieldspec(exc):
- """ walk traceback to extract spec of invalid field from marshmallow """
- path = []
- tbck = exc.__traceback__
- while tbck:
- if tbck.tb_frame.f_code.co_name == '_serialize':
- if 'attr' in tbck.tb_frame.f_locals:
- path.append(tbck.tb_frame.f_locals['attr'])
- elif tbck.tb_frame.f_code.co_name == '_init_fields':
- path = '.'.join(path)
- spec = ', '.join([f'{path}.{key}' for key in tbck.tb_frame.f_locals['invalid_fields']])
- return spec
- tbck = tbck.tb_next
- return None
-
-def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
- """ add ANSI color to data """
- if color is None:
- # autodetect colorize
- color = canColorize
- if not color:
- # no color wanted
- return data
- if not canColorize:
- # want color, but not supported
- raise ValueError('Please install pygments to colorize output')
-
- scheme = {
- Token: ('', ''),
- Token.Name.Tag: ('cyan', 'brightcyan'),
- Token.Literal.Scalar: ('green', 'green'),
- Token.Literal.String: ('green', 'green'),
- Token.Keyword.Constant: ('magenta', 'brightmagenta'),
- Token.Literal.Number: ('magenta', 'brightmagenta'),
- Token.Error: ('red', 'brightred'),
- Token.Name: ('red', 'brightred'),
- Token.Operator: ('red', 'brightred'),
- }
-
- class MyYamlLexer(YamlLexer):
- """ colorize yaml constants and integers """
- def get_tokens(self, text, unfiltered=False):
- for typ, value in super().get_tokens(text, unfiltered):
- if typ is Token.Literal.Scalar.Plain:
- if value in {'true', 'false', 'null'}:
- typ = Token.Keyword.Constant
- elif value == HIDDEN:
- typ = Token.Error
- else:
- try:
- int(value, 10)
- except ValueError:
- try:
- float(value)
- except ValueError:
- pass
- else:
- typ = Token.Literal.Number.Float
- else:
- typ = Token.Literal.Number.Integer
- yield typ, value
-
- res = highlight(
- data,
- MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer),
- get_formatter_by_name(formatter, colorscheme=scheme)
- )
-
- return res.rstrip('\n') if strip else res
-
-
-### render modules ###
-
-# allow yaml to represent hidden attributes
yaml.add_representer(
_Hidden,
- lambda cls, data: cls.represent_data(str(data))
+ lambda dumper, data: dumper.represent_data(str(data))
)
+HIDDEN = _Hidden()
+
+# multiline attributes
+class _Multiline(str):
+ pass
+
+yaml.add_representer(
+ _Multiline,
+ lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str', data, style='|')
+
+)
+
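As an aside, a minimal sketch of what the representer registered above produces (assuming PyYAML is installed); multiline values are emitted as literal block scalars:

    import yaml

    class _Multiline(str):
        """ marker type for values that should be dumped in block style """

    yaml.add_representer(
        _Multiline,
        lambda dumper, data: dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
    )

    print(yaml.dump({'dkim_key': _Multiline('line one\nline two')}), end='')
    # dkim_key: |-
    #   line one
    #   line two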
+# yaml render module
class RenderYAML:
""" Marshmallow YAML Render Module
"""
@@ -178,7 +380,7 @@ class RenderYAML:
@staticmethod
def _augment(kwargs, defaults):
- """ add default kv's to kwargs if missing
+ """ add defaults to kwargs if missing
"""
for key, value in defaults.items():
if key not in kwargs:
@@ -205,6 +407,7 @@ class RenderYAML:
cls._augment(kwargs, cls._dump_defaults)
return yaml.dump(*args, **kwargs)
+# json encoder
class JSONEncoder(json.JSONEncoder):
""" JSONEncoder supporting serialization of HIDDEN """
def default(self, o):
@@ -213,13 +416,14 @@ class JSONEncoder(json.JSONEncoder):
return str(o)
return json.JSONEncoder.default(self, o)
+# json render module
class RenderJSON:
""" Marshmallow JSON Render Module
"""
@staticmethod
def _augment(kwargs, defaults):
- """ add default kv's to kwargs if missing
+ """ add defaults to kwargs if missing
"""
for key, value in defaults.items():
if key not in kwargs:
@@ -245,7 +449,7 @@ class RenderJSON:
return json.dumps(*args, **kwargs)
-### custom fields ###
+### schema: custom fields ###
class LazyStringField(fields.String):
""" Field that serializes a "false" value to the empty string
@@ -261,6 +465,11 @@ class CommaSeparatedListField(fields.Raw):
a list of strings
"""
+ default_error_messages = {
+ "invalid": "Not a valid string or list.",
+ "invalid_utf8": "Not a valid utf-8 string or list.",
+ }
+
def _deserialize(self, value, attr, data, **kwargs):
""" deserialize comma separated string to list of strings
"""
@@ -269,16 +478,31 @@ class CommaSeparatedListField(fields.Raw):
if not value:
return []
- # split string
- if isinstance(value, str):
- return list([item.strip() for item in value.split(',') if item.strip()])
+ # handle list
+ if isinstance(value, list):
+ try:
+ value = [ensure_text_type(item) for item in value]
+ except UnicodeDecodeError as exc:
+ raise self.make_error("invalid_utf8") from exc
+
+ # handle text
else:
- return value
+ if not isinstance(value, (str, bytes)):
+ raise self.make_error("invalid")
+ try:
+ value = ensure_text_type(value)
+ except UnicodeDecodeError as exc:
+ raise self.make_error("invalid_utf8") from exc
+ else:
+ value = filter(None, [item.strip() for item in value.split(',')])
+
+ return list(value)
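A minimal sketch of the accepted inputs (illustrative only; the real field additionally raises marshmallow errors for non-UTF-8 or non-string/list input):

    def comma_separated_to_list(value):
        """ mirrors the happy path of CommaSeparatedListField._deserialize """
        if not value:
            return []
        if isinstance(value, list):
            return [str(item) for item in value]
        return [item.strip() for item in value.split(',') if item.strip()]

    assert comma_separated_to_list('user@example.com, other@example.com') == ['user@example.com', 'other@example.com']
    assert comma_separated_to_list(['a', 'b']) == ['a', 'b']
    assert comma_separated_to_list(None) == []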
class DkimKeyField(fields.String):
- """ Serialize a dkim key to a list of strings (lines) and
- Deserialize a string or list of strings to a valid dkim key
+ """ Serialize a dkim key to a multiline string and
+        deserialize dkim key data given as a string or list of strings
+        to a valid dkim key
"""
default_error_messages = {
@@ -286,21 +510,26 @@ class DkimKeyField(fields.String):
"invalid_utf8": "Not a valid utf-8 string or list.",
}
- _clean_re = re.compile(
- r'(^-----BEGIN (RSA )?PRIVATE KEY-----|-----END (RSA )?PRIVATE KEY-----$|\s+)',
- flags=re.UNICODE
- )
-
def _serialize(self, value, attr, obj, **kwargs):
- """ serialize dkim key to a list of strings (lines)
+ """ serialize dkim key as multiline string
"""
# map empty string and None to None
if not value:
- return None
+ return ''
- # return list of key lines without header/footer
- return value.decode('utf-8').strip().split('\n')[1:-1]
+ # return multiline string
+ return _Multiline(value.decode('utf-8'))
+
+ def _wrap_key(self, begin, data, end):
+ """ generator to wrap key into RFC 7468 format """
+ yield begin
+ pos = 0
+ while pos < len(data):
+ yield data[pos:pos+64]
+ pos += 64
+ yield end
+ yield ''
def _deserialize(self, value, attr, data, **kwargs):
""" deserialize a string or list of strings to dkim key data
@@ -310,7 +539,7 @@ class DkimKeyField(fields.String):
# convert list to str
if isinstance(value, list):
try:
- value = ''.join([ensure_text_type(item) for item in value])
+ value = ''.join([ensure_text_type(item) for item in value]).strip()
except UnicodeDecodeError as exc:
raise self.make_error("invalid_utf8") from exc
@@ -319,34 +548,53 @@ class DkimKeyField(fields.String):
if not isinstance(value, (str, bytes)):
raise self.make_error("invalid")
try:
- value = ensure_text_type(value)
+ value = ensure_text_type(value).strip()
except UnicodeDecodeError as exc:
raise self.make_error("invalid_utf8") from exc
- # clean value (remove whitespace and header/footer)
- value = self._clean_re.sub('', value.strip())
+ # generate new key?
+ if value.lower() == '-generate-':
+ return dkim.gen_key()
- # map empty string/list to None
+ # no key?
if not value:
return None
- # handle special value 'generate'
- elif value == 'generate':
- return dkim.gen_key()
+ # remember part of value for ValidationError
+ bad_key = value
- # remember some keydata for error message
- keydata = f'{value[:25]}...{value[-10:]}' if len(value) > 40 else value
+ # strip header and footer, clean whitespace and wrap to 64 characters
+ try:
+ if value.startswith('-----BEGIN '):
+ end = value.index('-----', 11) + 5
+ header = value[:end]
+ value = value[end:]
+ else:
+ header = '-----BEGIN PRIVATE KEY-----'
- # wrap value into valid pem layout and check validity
- value = (
- '-----BEGIN PRIVATE KEY-----\n' +
- '\n'.join(wrap(value, 64)) +
- '\n-----END PRIVATE KEY-----\n'
- ).encode('ascii')
+ if (pos := value.find('-----END ')) >= 0:
+ end = value.index('-----', pos+9) + 5
+ footer = value[pos:end]
+ value = value[:pos]
+ else:
+ footer = '-----END PRIVATE KEY-----'
+        except ValueError as exc:
+            raise ValidationError(f'invalid dkim key {bad_key!r}') from exc
+
+ # remove whitespace from key data
+ value = ''.join(value.split())
+
+ # remember part of value for ValidationError
+ bad_key = f'{value[:25]}...{value[-10:]}' if len(value) > 40 else value
+
+ # wrap key according to RFC 7468
+ value = ('\n'.join(self._wrap_key(header, value, footer))).encode('ascii')
+
+ # check key validity
try:
crypto.load_privatekey(crypto.FILETYPE_PEM, value)
except crypto.Error as exc:
- raise ValidationError(f'invalid dkim key {keydata!r}') from exc
+ raise ValidationError(f'invalid dkim key {bad_key!r}') from exc
else:
return value
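A standalone sketch of the header/footer handling and RFC 7468 wrapping above (the repeated 'MIIEvg' is placeholder data, not a valid key, so the final pyOpenSSL validity check is left out):

    def wrap_key(value):
        """ strip an existing PEM header/footer, remove whitespace and re-wrap to 64 columns """
        value = value.strip()
        header, footer = '-----BEGIN PRIVATE KEY-----', '-----END PRIVATE KEY-----'
        if value.startswith('-----BEGIN '):
            end = value.index('-----', 11) + 5
            header, value = value[:end], value[end:]
        if (pos := value.find('-----END ')) >= 0:
            end = value.index('-----', pos + 9) + 5
            footer, value = value[pos:end], value[:pos]
        data = ''.join(value.split())
        lines = [header] + [data[i:i+64] for i in range(0, len(data), 64)] + [footer, '']
        return '\n'.join(lines).encode('ascii')

    print(wrap_key('MIIEvg' * 30).decode('ascii'))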
@@ -398,6 +646,27 @@ class PasswordField(fields.Str):
### base schema ###
+class Storage:
+ """ Storage class to save information in context
+ """
+
+ context = {}
+
+ def _bind(self, key, bind):
+ if bind is True:
+ return (self.__class__, key)
+ if isinstance(bind, str):
+ return (get_schema(self.recall(bind).__class__), key)
+ return (bind, key)
+
+ def store(self, key, value, bind=None):
+ """ store value under key """
+        self.context.setdefault('_track', {})[self._bind(key, bind)] = value
+
+ def recall(self, key, bind=None):
+ """ recall value from key """
+ return self.context['_track'][self._bind(key, bind)]
+
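How the store/recall binding behaves, as a trimmed standalone sketch (the string-bind branch that resolves a schema via get_schema is omitted here):

    class Storage:
        """ trimmed copy of the mixin above """
        context = {}

        def _bind(self, key, bind):
            if bind is True:
                return (self.__class__, key)
            return (bind, key)

        def store(self, key, value, bind=None):
            self.context.setdefault('_track', {})[self._bind(key, bind)] = value

        def recall(self, key, bind=None):
            return self.context['_track'][self._bind(key, bind)]

    class DomainSchema(Storage):
        pass

    schema = DomainSchema()
    schema.store('prune', True, bind=True)   # keyed per schema class: (DomainSchema, 'prune')
    schema.store('parent', 'example.com')    # keyed globally: (None, 'parent')
    assert schema.recall('prune', bind=True) is True
    assert schema.recall('parent') == 'example.com'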
class BaseOpts(SQLAlchemyAutoSchemaOpts):
""" Option class with sqla session
"""
@@ -408,7 +677,7 @@ class BaseOpts(SQLAlchemyAutoSchemaOpts):
meta.sibling = False
super(BaseOpts, self).__init__(meta, ordered=ordered)
-class BaseSchema(ma.SQLAlchemyAutoSchema):
+class BaseSchema(ma.SQLAlchemyAutoSchema, Storage):
""" Marshmallow base schema with custom exclude logic
and option to hide sqla defaults
"""
@@ -425,6 +694,9 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
def __init__(self, *args, **kwargs):
+        # prepare 'only' so that explicitly specified attributes are auto-included
+ only = set(kwargs.get('only') or [])
+
# get context
context = kwargs.get('context', {})
flags = {key for key, value in context.items() if value is True}
@@ -433,13 +705,13 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
exclude = set(kwargs.get('exclude', []))
# always exclude
- exclude.update({'created_at', 'updated_at'})
+ exclude.update({'created_at', 'updated_at'} - only)
# add include_by_context
if context is not None:
for need, what in getattr(self.Meta, 'include_by_context', {}).items():
if not flags & set(need):
- exclude |= set(what)
+ exclude |= what - only
# update excludes
kwargs['exclude'] = exclude
@@ -448,12 +720,15 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
super().__init__(*args, **kwargs)
# exclude_by_value
- self._exclude_by_value = getattr(self.Meta, 'exclude_by_value', {})
+ self._exclude_by_value = {
+ key: values for key, values in getattr(self.Meta, 'exclude_by_value', {}).items()
+ if key not in only
+ }
# exclude default values
if not context.get('full'):
for column in self.opts.model.__table__.columns:
- if column.name not in exclude:
+ if column.name not in exclude and column.name not in only:
self._exclude_by_value.setdefault(column.name, []).append(
None if column.default is None else column.default.arg
)
@@ -463,7 +738,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if context is not None:
for need, what in getattr(self.Meta, 'hide_by_context', {}).items():
if not flags & set(need):
- self._hide_by_context |= set(what)
+ self._hide_by_context |= what - only
# remember primary keys
self._primary = str(self.opts.model.__table__.primary_key.columns.values()[0].name)
@@ -479,20 +754,13 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
order.remove(self._primary)
order.insert(0, self._primary)
- # order dump_fields
- for field in order:
- if field in self.dump_fields:
- self.dump_fields[field] = self.dump_fields.pop(field)
+ # order fieldlists
+ for fieldlist in (self.fields, self.load_fields, self.dump_fields):
+ for field in order:
+ if field in fieldlist:
+ fieldlist[field] = fieldlist.pop(field)
- # move pre_load hook "_track_import" to the front
- hooks = self._hooks[('pre_load', False)]
- hooks.remove('_track_import')
- hooks.insert(0, '_track_import')
- # move pre_load hook "_add_instance" to the end
- hooks.remove('_add_required')
- hooks.append('_add_required')
-
- # move post_load hook "_add_instance" to the end
+ # move post_load hook "_add_instance" to the end (after load_instance mixin)
hooks = self._hooks[('post_load', False)]
hooks.remove('_add_instance')
hooks.append('_add_instance')
@@ -506,8 +774,8 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
}
def _call_and_store(self, *args, **kwargs):
- """ track curent parent field for pruning """
- self.context['parent_field'] = kwargs['field_name']
+ """ track current parent field for pruning """
+ self.store('field', kwargs['field_name'], True)
return super()._call_and_store(*args, **kwargs)
# this is only needed to work around the declared attr "email" primary key in model
@@ -518,11 +786,13 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if keys := getattr(self.Meta, 'primary_keys', None):
filters = {key: data.get(key) for key in keys}
if None not in filters.values():
- return self.session.query(self.opts.model).filter_by(**filters).first()
- return super().get_instance(data)
+                res = self.session.query(self.opts.model).filter_by(**filters).first()
+                return res
+            res = super().get_instance(data)
+            return res
@pre_load(pass_many=True)
- def _patch_input(self, items, many, **kwargs): # pylint: disable=unused-argument
+ def _patch_many(self, items, many, **kwargs): # pylint: disable=unused-argument
""" - flush sqla session before serializing a section when requested
(make sure all objects that could be referred to later are created)
- when in update mode: patch input data before deserialization
@@ -540,12 +810,19 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# patch "delete", "prune" and "default"
want_prune = []
- def patch(count, data, prune):
+ def patch(count, data):
# don't allow __delete__ coming from input
if '__delete__' in data:
raise ValidationError('Unknown field.', f'{count}.__delete__')
+ # fail when hash_password is specified without password
+ if 'hash_password' in data and not 'password' in data:
+ raise ValidationError(
+ 'Nothing to hash. Field "password" is missing.',
+ field_name = f'{count}.hash_password',
+ )
+
# handle "prune list" and "delete item" (-pkey: none and -pkey: id)
for key in data:
if key.startswith('-'):
@@ -553,10 +830,10 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# delete or prune
if data[key] is None:
# prune
- prune.append(True)
+ want_prune.append(True)
return None
# mark item for deletion
- return {key[1:]: data[key], '__delete__': True}
+ return {key[1:]: data[key], '__delete__': count}
# handle "set to default value" (-key: none)
def set_default(key, value):
@@ -567,7 +844,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
return (key, None)
if value is not None:
raise ValidationError(
- 'When resetting to default value must be null.',
+ 'Value must be "null" when resetting to default.',
f'{count}.{key}'
)
value = self.opts.model.__table__.columns[key].default
@@ -583,10 +860,128 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# convert items to "delete" and filter "prune" item
items = [
item for item in [
- patch(count, item, want_prune) for count, item in enumerate(items)
+ patch(count, item) for count, item in enumerate(items)
] if item
]
+ # remember if prune was requested for _prune_items@post_load
+ self.store('prune', bool(want_prune), True)
+
+ # remember original items to stabilize password-changes in _add_instance@post_load
+ self.store('original', items, True)
+
+ return items
+
+ @pre_load
+ def _patch_item(self, data, many, **kwargs): # pylint: disable=unused-argument
+ """ - call callback function to track import
+ - stabilize import of items with auto-increment primary key
+ - delete items
+ - delete/prune list attributes
+ - add missing required attributes
+ """
+
+ # callback
+ if callback := self.context.get('callback'):
+ callback(self, data)
+
+ # stop early when not updating
+ if not self.opts.load_instance or not self.context.get('update'):
+ return data
+
+ # stabilize import of auto-increment primary keys (not required),
+ # by matching import data to existing items and setting primary key
+ if not self._primary in data:
+ for item in getattr(self.recall('parent'), self.recall('field', 'parent')):
+ existing = self.dump(item, many=False)
+ this = existing.pop(self._primary)
+ if data == existing:
+ instance = item
+ data[self._primary] = this
+ break
+
+ # try to load instance
+ instance = self.instance or self.get_instance(data)
+ if instance is None:
+
+ if '__delete__' in data:
+ # deletion of non-existent item requested
+ raise ValidationError(
+ f'Item to delete not found: {data[self._primary]!r}.',
+ field_name = f'{data["__delete__"]}.{self._primary}',
+ )
+
+ else:
+
+ if self.context.get('update'):
+ # remember instance as parent for pruning siblings
+ if not self.Meta.sibling:
+ self.store('parent', instance)
+ # delete instance from session when marked
+ if '__delete__' in data:
+ self.opts.sqla_session.delete(instance)
+ # delete item from lists or prune lists
+ # currently: domain.alternatives, user.forward_destination,
+ # user.manager_of, aliases.destination
+ for key, value in data.items():
+ if not isinstance(self.fields.get(key), (
+ RelatedList, CommaSeparatedListField, fields.Raw)
+ ) or not isinstance(value, list):
+ continue
+ # deduplicate new value
+ new_value = set(value)
+ # handle list pruning
+ if '-prune-' in value:
+ value.remove('-prune-')
+ new_value.remove('-prune-')
+ else:
+ for old in getattr(instance, key):
+ # using str() is okay for now (see above)
+ new_value.add(str(old))
+ # handle item deletion
+ for item in value:
+ if item.startswith('-'):
+ new_value.remove(item)
+ try:
+ new_value.remove(item[1:])
+ except KeyError as exc:
+ raise ValidationError(
+ f'Item to delete not found: {item[1:]!r}.',
+ field_name=f'?.{key}',
+ ) from exc
+ # sort list of new values
+ data[key] = sorted(new_value)
+                        # log backref modification not caught by the modify hook
+ if isinstance(self.fields[key], RelatedList):
+ if callback := self.context.get('callback'):
+ before = {str(v) for v in getattr(instance, key)}
+ after = set(data[key])
+ if before != after:
+ callback(self, instance, {
+ 'key': key,
+ 'target': str(instance),
+ 'before': before,
+ 'after': after,
+ })
+
+ # add attributes required for validation from db
+ for attr_name, field_obj in self.load_fields.items():
+ if field_obj.required and attr_name not in data:
+ data[attr_name] = getattr(instance, attr_name)
+
+ return data
+
+ @post_load(pass_many=True)
+ def _prune_items(self, items, many, **kwargs): # pylint: disable=unused-argument
+ """ handle list pruning """
+
+ # stop early when not updating
+ if not self.context.get('update'):
+ return items
+
+ # get prune flag from _patch_many@pre_load
+ want_prune = self.recall('prune', True)
+
# prune: determine if existing items in db need to be added or marked for deletion
add_items = False
del_items = False
@@ -603,144 +998,60 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
if add_items or del_items:
existing = {item[self._primary] for item in items if self._primary in item}
- for item in getattr(self.context['parent'], self.context['parent_field']):
+ for item in getattr(self.recall('parent'), self.recall('field', 'parent')):
key = getattr(item, self._primary)
if key not in existing:
if add_items:
items.append({self._primary: key})
else:
- items.append({self._primary: key, '__delete__': True})
+ items.append({self._primary: key, '__delete__': '?'})
return items
- @pre_load
- def _track_import(self, data, many, **kwargs): # pylint: disable=unused-argument
- """ call callback function to track import
- """
- # callback
- if callback := self.context.get('callback'):
- callback(self, data)
-
- return data
-
- @pre_load
- def _add_required(self, data, many, **kwargs): # pylint: disable=unused-argument
- """ when updating:
- allow modification of existing items having required attributes
- by loading existing value from db
+ @post_load
+ def _add_instance(self, item, many, **kwargs): # pylint: disable=unused-argument
+ """ - undo password change in existing instances when plain password did not change
+ - add new instances to sqla session
"""
- if not self.opts.load_instance or not self.context.get('update'):
- return data
-
- # stabilize import of auto-increment primary keys (not required),
- # by matching import data to existing items and setting primary key
- if not self._primary in data:
- for item in getattr(self.context['parent'], self.context['parent_field']):
- existing = self.dump(item, many=False)
- this = existing.pop(self._primary)
- if data == existing:
- instance = item
- data[self._primary] = this
- break
-
- # try to load instance
- instance = self.instance or self.get_instance(data)
- if instance is None:
-
- if '__delete__' in data:
- # deletion of non-existent item requested
- raise ValidationError(
- f'item to delete not found: {data[self._primary]!r}',
- field_name=f'?.{self._primary}',
- )
-
- else:
-
- if self.context.get('update'):
- # remember instance as parent for pruning siblings
- if not self.Meta.sibling:
- self.context['parent'] = instance
- # delete instance when marked
- if '__delete__' in data:
- self.opts.sqla_session.delete(instance)
- # delete item from lists or prune lists
- # currently: domain.alternatives, user.forward_destination,
- # user.manager_of, aliases.destination
- for key, value in data.items():
- if not isinstance(self.fields[key], fields.Nested) and isinstance(value, list):
- new_value = set(value)
- # handle list pruning
- if '-prune-' in value:
- value.remove('-prune-')
- new_value.remove('-prune-')
- else:
- for old in getattr(instance, key):
- # using str() is okay for now (see above)
- new_value.add(str(old))
- # handle item deletion
- for item in value:
- if item.startswith('-'):
- new_value.remove(item)
- try:
- new_value.remove(item[1:])
- except KeyError as exc:
- raise ValidationError(
- f'item to delete not found: {item[1:]!r}',
- field_name=f'?.{key}',
- ) from exc
- # deduplicate and sort list
- data[key] = sorted(new_value)
- # log backref modification not catched by hook
- if isinstance(self.fields[key], RelatedList):
- if callback := self.context.get('callback'):
- callback(self, instance, {
- 'key': key,
- 'target': str(instance),
- 'before': [str(v) for v in getattr(instance, key)],
- 'after': data[key],
- })
-
-
-
- # add attributes required for validation from db
- # TODO: this will cause validation errors if value from database does not validate
- # but there should not be an invalid value in the database
- for attr_name, field_obj in self.load_fields.items():
- if field_obj.required and attr_name not in data:
- data[attr_name] = getattr(instance, attr_name)
-
- return data
-
- @post_load(pass_original=True)
- def _add_instance(self, item, original, many, **kwargs): # pylint: disable=unused-argument
- """ add new instances to sqla session """
-
- if item in self.opts.sqla_session:
- # item was modified
- if 'hash_password' in original:
- # stabilize import of passwords to be hashed,
- # by not re-hashing an unchanged password
- if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None):
- if attr.history.has_changes() and attr.history.deleted:
- try:
- # reset password hash, if password was not changed
- inst = type(item)(password=attr.history.deleted[-1])
- if inst.check_password(original['password']):
- item.password = inst.password
- except ValueError:
- # hash in db is invalid
- pass
- else:
- del inst
- else:
- # new item
+ if not item in self.opts.sqla_session:
self.opts.sqla_session.add(item)
+ return item
+
+ # stop early if item has no password attribute
+ if not hasattr(item, 'password'):
+ return item
+
+ # did we hash a new plaintext password?
+ original = None
+ pkey = getattr(item, self._primary)
+ for data in self.recall('original', True):
+ if 'hash_password' in data and data.get(self._primary) == pkey:
+ original = data['password']
+ break
+ if original is None:
+            # no plaintext password was hashed - nothing to undo
+ return item
+
+ # reset hash if plain password matches hash from db
+ if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None):
+ if attr.history.has_changes() and attr.history.deleted:
+ try:
+ # reset password hash
+ inst = type(item)(password=attr.history.deleted[-1])
+ if inst.check_password(original):
+ item.password = inst.password
+ except ValueError:
+ # hash in db is invalid
+ pass
+ else:
+ del inst
+
return item
@post_dump
def _hide_values(self, data, many, **kwargs): # pylint: disable=unused-argument
- """ hide secrets and order output """
+ """ hide secrets """
# stop early when not excluding/hiding
if not self._exclude_by_value and not self._hide_by_context:
@@ -757,7 +1068,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
# this field is used to mark items for deletion
mark_delete = fields.Boolean(data_key='__delete__', load_only=True)
- # TODO: remove LazyStringField (when model was changed - IMHO comment should not be nullable)
+ # TODO: this can be removed when comment is not nullable in model
comment = LazyStringField()
@@ -892,27 +1203,28 @@ class RelaySchema(BaseSchema):
load_instance = True
-class MailuSchema(Schema):
+@mapped
+class MailuSchema(Schema, Storage):
""" Marshmallow schema for complete Mailu config """
class Meta:
""" Schema config """
+ model = models.MailuConfig
render_module = RenderYAML
order = ['domain', 'user', 'alias', 'relay'] # 'config'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- # order dump_fields
- for field in self.Meta.order:
- if field in self.dump_fields:
- self.dump_fields[field] = self.dump_fields.pop(field)
+ # order fieldlists
+ for fieldlist in (self.fields, self.load_fields, self.dump_fields):
+ for field in self.Meta.order:
+ if field in fieldlist:
+ fieldlist[field] = fieldlist.pop(field)
def _call_and_store(self, *args, **kwargs):
""" track current parent and field for pruning """
- self.context.update({
- 'parent': self.context.get('config'),
- 'parent_field': kwargs['field_name'],
- })
+ self.store('field', kwargs['field_name'], True)
+ self.store('parent', self.context.get('config'))
return super()._call_and_store(*args, **kwargs)
@pre_load
diff --git a/docs/cli.rst b/docs/cli.rst
index 6d48c576..891db152 100644
--- a/docs/cli.rst
+++ b/docs/cli.rst
@@ -138,8 +138,8 @@ The purpose of this command is to export the complete configuration in YAML or J
Options:
-f, --full Include attributes with default value.
-s, --secrets Include secret attributes (dkim-key, passwords).
- -c, --color Force colorized output.
-d, --dns Include dns records.
+ -c, --color Force colorized output.
-o, --output-file FILENAME Save configuration to file.
-j, --json Export configuration in json format.
-?, -h, --help Show this message and exit.
@@ -147,14 +147,18 @@ The purpose of this command is to export the complete configuration in YAML or J
Only non-default attributes are exported. If you want to export all attributes use ``--full``.
If you want to export plain-text secrets (dkim-keys, passwords) you have to add the ``--secrets`` option.
To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option.
+
By default all configuration objects are exported (domain, user, alias, relay). You can specify
filters to export only some objects or attributes (try: ``user`` or ``domain.name``).
+Attributes explicitly specified in filters are automatically exported: there is no need to add ``--secrets`` or ``--full``.
.. code-block:: bash
- $ docker-compose exec admin flask mailu config-export -o mail-config.yml
+    $ docker-compose exec admin flask mailu config-export --output-file mail-config.yml
- $ docker-compose exec admin flask mailu config-export --dns domain.dns_mx domain.dns_spf
+ $ docker-compose exec admin flask mailu config-export domain.dns_mx domain.dns_spf
+
+ $ docker-compose exec admin flask mailu config-export user.spam_threshold
config-import
-------------
@@ -211,7 +215,7 @@ mail-config.yml contains the configuration and looks like this:
config-update shows the number of created/modified/deleted objects after import.
To suppress all messages except error messages use ``--quiet``.
-By adding the ``--verbose`` switch (up to two times) the import gets more detailed and shows exactly what attributes changed.
+Adding the ``--verbose`` switch makes the import more detailed and shows exactly which attributes changed.
In all log messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to log secrets.
If you want to test what would be done when importing without committing any changes, use ``--dry-run``.
@@ -234,6 +238,9 @@ It is possible to delete a single element or prune all elements from lists and a
The ``-key: null`` notation can also be used to reset an attribute to its default.
To reset *spam_threshold* to its default *80* use ``-spam_threshold: null``.
+A new dkim key can be generated when adding or modifying a domain, by using the special value
+``dkim_key: -generate-``.
+
This is a complete YAML template with all additional parameters that can be defined:
.. code-block:: yaml
From 0a9f732faa4d1addcc848cda2678976fb244314c Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 22 Feb 2021 20:35:23 +0100
Subject: [PATCH 063/181] added docstring to Logger. use generators.
---
core/admin/mailu/models.py | 2 +-
core/admin/mailu/schemas.py | 48 +++++++++++++++++++++----------------
2 files changed, 28 insertions(+), 22 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index 1b3c787a..d134086f 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -83,7 +83,7 @@ class CommaSeparatedList(db.TypeDecorator):
def process_result_value(self, value, dialect):
""" split comma separated string to list """
- return list(filter(bool, [item.strip() for item in value.split(',')])) if value else []
+ return list(filter(bool, (item.strip() for item in value.split(',')))) if value else []
python_type = list
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 4c5042ea..68814170 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -76,6 +76,9 @@ class MyYamlLexer(YamlLexer):
yield typ, value
class Logger:
+ """ helps with counting and colorizing
+ imported and exported data
+ """
def __init__(self, want_color=None, can_color=False, debug=False, secrets=False):
@@ -195,25 +198,26 @@ class Logger:
continue
before = target._dkim_key_on_disk
after = target._dkim_key
- if before != after:
- # "de-dupe" messages; this event is fired at every flush
- if not (target, before, after) in self._dedupe_dkim:
- self._dedupe_dkim.add((target, before, after))
- self._counter.update([('Modified', target.__table__.name)])
- if self.verbose:
- if self.secrets:
- before = before.decode('ascii', 'ignore')
- after = after.decode('ascii', 'ignore')
- else:
- before = HIDDEN if before else ''
- after = HIDDEN if after else ''
- self._log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
+ # "de-dupe" messages; this event is fired at every flush
+ if before == after or (target, before, after) in self._dedupe_dkim:
+ continue
+ self._dedupe_dkim.add((target, before, after))
+ self._counter.update([('Modified', target.__table__.name)])
+ if self.verbose:
+ if self.secrets:
+ before = before.decode('ascii', 'ignore')
+ after = after.decode('ascii', 'ignore')
+ else:
+ before = HIDDEN if before else ''
+ after = HIDDEN if after else ''
+ self._log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')
def track_serialize(self, obj, item, backref=None):
""" callback method to track import """
# called for backref modification?
if backref is not None:
- self._log('Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format_map(backref))
+ self._log(
+ 'Modified', item, '{target!r} {key}: {before!r} -> {after!r}'.format_map(backref))
return
# show input data?
if self.verbose < 2:
@@ -263,7 +267,7 @@ class Logger:
if path:
return res
- maxlen = max([len(loc) for loc, msg in res])
+ maxlen = max(len(loc) for loc, msg in res)
res = [f' - {loc.ljust(maxlen)} : {msg}' for loc, msg in res]
errors = f'{len(res)} error{["s",""][len(res)==1]}'
res.insert(0, f'[ValidationError] {errors} occurred during input validation')
@@ -279,7 +283,9 @@ class Logger:
if 'attr' in trace.tb_frame.f_locals:
path.append(trace.tb_frame.f_locals['attr'])
elif trace.tb_frame.f_code.co_name == '_init_fields':
- spec = ', '.join(['.'.join(path + [key]) for key in trace.tb_frame.f_locals['invalid_fields']])
+ spec = ', '.join(
+ '.'.join(path + [key])
+ for key in trace.tb_frame.f_locals['invalid_fields'])
return f'Invalid filter: {spec}'
trace = trace.tb_next
return None
@@ -494,7 +500,7 @@ class CommaSeparatedListField(fields.Raw):
except UnicodeDecodeError as exc:
raise self.make_error("invalid_utf8") from exc
else:
- value = filter(None, [item.strip() for item in value.split(',')])
+ value = filter(bool, (item.strip() for item in value.split(',')))
return list(value)
@@ -539,7 +545,7 @@ class DkimKeyField(fields.String):
# convert list to str
if isinstance(value, list):
try:
- value = ''.join([ensure_text_type(item) for item in value]).strip()
+ value = ''.join(ensure_text_type(item) for item in value).strip()
except UnicodeDecodeError as exc:
raise self.make_error("invalid_utf8") from exc
@@ -855,7 +861,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema, Storage):
)
return (key, value.arg)
- return dict([set_default(key, value) for key, value in data.items()])
+ return dict(set_default(key, value) for key, value in data.items())
# convert items to "delete" and filter "prune" item
items = [
@@ -1059,11 +1065,11 @@ class BaseSchema(ma.SQLAlchemyAutoSchema, Storage):
# exclude or hide values
full = self.context.get('full')
- return type(data)([
+ return type(data)(
(key, HIDDEN if key in self._hide_by_context else value)
for key, value in data.items()
if full or key not in self._exclude_by_value or value not in self._exclude_by_value[key]
- ])
+ )
# this field is used to mark items for deletion
mark_delete = fields.Boolean(data_key='__delete__', load_only=True)
From b55b53b781350addad83fe93ac68c5c6c7188847 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Fri, 26 Feb 2021 20:51:58 +0100
Subject: [PATCH 064/181] optimize generation of transport nexthop
---
core/admin/mailu/internal/views/postfix.py | 48 +++++++++++++++++++---
1 file changed, 43 insertions(+), 5 deletions(-)
diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py
index a5507830..7f8418cf 100644
--- a/core/admin/mailu/internal/views/postfix.py
+++ b/core/admin/mailu/internal/views/postfix.py
@@ -2,6 +2,7 @@ from mailu import models
from mailu.internal import internal
import flask
+import idna
import re
import srslib
@@ -37,11 +38,48 @@ def postfix_transport(email):
return flask.abort(404)
localpart, domain_name = models.Email.resolve_domain(email)
relay = models.Relay.query.get(domain_name) or flask.abort(404)
- ret = "smtp:[{0}]".format(relay.smtp)
- if ":" in relay.smtp:
- split = relay.smtp.split(':')
- ret = "smtp:[{0}]:{1}".format(split[0], split[1])
- return flask.jsonify(ret)
+ target = relay.smtp.lower()
+ port = None
+ if use_mx := target.startswith('mx:'):
+ target = target[3:]
+ if target.startswith('['):
+ if use_mx or ']' not in target:
+ # invalid target (mx: and [])
+ flask.abort(400)
+ host, rest = target[1:].split(']', 1)
+ if rest.startswith(':'):
+ port = rest[1:]
+ elif rest:
+ # invalid target (rest should be :port)
+ flask.abort(400)
+ else:
+ if ':' in target:
+ host, port = target.rsplit(':', 1)
+ else:
+ host = target
+ if not host:
+ host = relay.name.lower()
+ use_mx = True
+ if ':' in host:
+ host = f'ipv6:{host}'
+ else:
+ try:
+ host = idna.encode(host).decode('ascii')
+ except idna.IDNAError:
+ # invalid target (fqdn not encodable)
+ flask.abort(400)
+ if port is not None:
+ try:
+ port = int(port, 10)
+ if port == 25:
+ port = None
+ except ValueError:
+ # invalid target (port should be numeric)
+ flask.abort(400)
+ if not use_mx:
+ host = f'[{host}]'
+ port = '' if port is None else f':{port}'
+ return flask.jsonify(f'smtp:{host}{port}')
@internal.route("/postfix/recipient/map/")
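To make the mapping concrete, a standalone sketch of the nexthop construction (flask.abort error handling and the mx:/bracket sanity checks are left out; the relay names are made up). Brackets suppress MX lookups in postfix, so only the mx: form keeps them:

    import idna

    def transport_nexthop(relay_name, smtp):
        """ mirrors the happy path of postfix_transport above """
        target = smtp.lower()
        port = None
        use_mx = target.startswith('mx:')
        if use_mx:
            target = target[3:]
        if target.startswith('['):
            host, rest = target[1:].split(']', 1)
            port = rest[1:] if rest.startswith(':') else None
        elif ':' in target:
            host, port = target.rsplit(':', 1)
        else:
            host = target
        if not host:
            host, use_mx = relay_name.lower(), True
        host = f'ipv6:{host}' if ':' in host else idna.encode(host).decode('ascii')
        if port is not None:
            port = int(port, 10)
            if port == 25:
                port = None
        if not use_mx:
            host = f'[{host}]'
        return f'smtp:{host}' + ('' if port is None else f':{port}')

    assert transport_nexthop('example.com', 'mail.example.com:587') == 'smtp:[mail.example.com]:587'
    assert transport_nexthop('example.com', 'mx:example.com') == 'smtp:example.com'
    assert transport_nexthop('example.com', '[mail.example.com]:25') == 'smtp:[mail.example.com]'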
From e90d5548a651fe933b5977e15c52cd0be954e7dd Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 10 Mar 2021 18:30:28 +0100
Subject: [PATCH 065/181] use RFC3339 for last_check
fixed to UTC for now
---
core/admin/mailu/schemas.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 68814170..284c1551 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -1135,6 +1135,7 @@ class FetchSchema(BaseSchema):
""" Schema config """
model = models.Fetch
load_instance = True
+ datetimeformat = '%Y-%m-%dT%H:%M:%S.%fZ' # RFC3339, but fixed to UTC
sibling = True
include_by_context = {
From c17bfae24050e48ae83cc21196346cfe7c6d90bc Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 10 Mar 2021 18:50:25 +0100
Subject: [PATCH 066/181] correct rfc3339 datetime serialization
now using correct timezone
---
core/admin/mailu/schemas.py | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 284c1551..277748f7 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -3,6 +3,7 @@
from copy import deepcopy
from collections import Counter
+from datetime import timezone
import json
import logging
@@ -455,7 +456,20 @@ class RenderJSON:
return json.dumps(*args, **kwargs)
-### schema: custom fields ###
+### marshmallow: custom fields ###
+
+def _rfc3339(datetime):
+ """ dump datetime according to rfc3339 """
+ if datetime.tzinfo is None:
+ datetime = datetime.astimezone(timezone.utc)
+ res = datetime.isoformat()
+ if res.endswith('+00:00'):
+ return f'{res[:-6]}Z'
+ return res
+
+fields.DateTime.SERIALIZATION_FUNCS['rfc3339'] = _rfc3339
+fields.DateTime.DESERIALIZATION_FUNCS['rfc3339'] = fields.DateTime.DESERIALIZATION_FUNCS['iso']
+fields.DateTime.DEFAULT_FORMAT = 'rfc3339'
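A quick standalone check of what this serialization produces (the naive-datetime case depends on the host timezone, so that output is only an example):

    from datetime import datetime, timezone, timedelta

    def rfc3339(dt):
        """ mirrors the _rfc3339 helper above """
        if dt.tzinfo is None:
            dt = dt.astimezone(timezone.utc)
        res = dt.isoformat()
        return f'{res[:-6]}Z' if res.endswith('+00:00') else res

    print(rfc3339(datetime(2021, 3, 10, 17, 50, 25, tzinfo=timezone.utc)))
    # 2021-03-10T17:50:25Z
    print(rfc3339(datetime(2021, 3, 10, 18, 50, 25, tzinfo=timezone(timedelta(hours=2)))))
    # 2021-03-10T18:50:25+02:00
    print(rfc3339(datetime(2021, 3, 10, 18, 50, 25)))
    # naive values are taken as local time, e.g. 2021-03-10T17:50:25Z on a CET host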
class LazyStringField(fields.String):
""" Field that serializes a "false" value to the empty string
@@ -1135,7 +1149,6 @@ class FetchSchema(BaseSchema):
""" Schema config """
model = models.Fetch
load_instance = True
- datetimeformat = '%Y-%m-%dT%H:%M:%S.%fZ' # RFC3339, but fixed to UTC
sibling = True
include_by_context = {
From ce9a9ec572d8220e673b2817f1350ffb53b3215a Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 10 Mar 2021 18:50:52 +0100
Subject: [PATCH 067/181] always init Logger first
---
core/admin/mailu/manage.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/core/admin/mailu/manage.py b/core/admin/mailu/manage.py
index f9c58d26..5708327e 100644
--- a/core/admin/mailu/manage.py
+++ b/core/admin/mailu/manage.py
@@ -375,6 +375,8 @@ def config_export(full=False, secrets=False, color=False, dns=False, output=None
""" Export configuration as YAML or JSON to stdout or file
"""
+ log = Logger(want_color=color or None, can_color=output.isatty())
+
only = only or MailuSchema.Meta.order
context = {
@@ -382,7 +384,6 @@ def config_export(full=False, secrets=False, color=False, dns=False, output=None
'secrets': secrets,
'dns': dns,
}
- log = Logger(want_color=color or None, can_color=output.isatty())
try:
schema = MailuSchema(only=only, context=context)
From 9cb6962335d00fbb0505e7d571271232d4f54b97 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Thu, 11 Mar 2021 18:12:50 +0100
Subject: [PATCH 068/181] Moved MyYamlLexer into logger
now cmdline runs without pygments
---
core/admin/mailu/schemas.py | 48 ++++++++++++++++++-------------------
1 file changed, 24 insertions(+), 24 deletions(-)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 277748f7..fdd766b3 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -53,34 +53,34 @@ def mapped(cls):
_model2schema[cls.Meta.model] = cls
return cls
-class MyYamlLexer(YamlLexer):
- """ colorize yaml constants and integers """
- def get_tokens(self, text, unfiltered=False):
- for typ, value in super().get_tokens(text, unfiltered):
- if typ is Token.Literal.Scalar.Plain:
- if value in {'true', 'false', 'null'}:
- typ = Token.Keyword.Constant
- elif value == HIDDEN:
- typ = Token.Error
- else:
- try:
- int(value, 10)
- except ValueError:
- try:
- float(value)
- except ValueError:
- pass
- else:
- typ = Token.Literal.Number.Float
- else:
- typ = Token.Literal.Number.Integer
- yield typ, value
-
class Logger:
""" helps with counting and colorizing
imported and exported data
"""
+ class MyYamlLexer(YamlLexer):
+ """ colorize yaml constants and integers """
+ def get_tokens(self, text, unfiltered=False):
+ for typ, value in super().get_tokens(text, unfiltered):
+ if typ is Token.Literal.Scalar.Plain:
+ if value in {'true', 'false', 'null'}:
+ typ = Token.Keyword.Constant
+ elif value == HIDDEN:
+ typ = Token.Error
+ else:
+ try:
+ int(value, 10)
+ except ValueError:
+ try:
+ float(value)
+ except ValueError:
+ pass
+ else:
+ typ = Token.Literal.Number.Float
+ else:
+ typ = Token.Literal.Number.Integer
+ yield typ, value
+
def __init__(self, want_color=None, can_color=False, debug=False, secrets=False):
self.lexer = 'yaml'
@@ -323,7 +323,7 @@ class Logger:
return data
lexer = lexer or self.lexer
- lexer = MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer)
+ lexer = Logger.MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer)
formatter = get_formatter_by_name(formatter or self.formatter, colorscheme=self.colorscheme)
if strip is None:
strip = self.strip
From 0c38128c4e459aa6fb783c86bea51ae2e19ac9b7 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Thu, 11 Mar 2021 18:38:00 +0100
Subject: [PATCH 069/181] Add pygments to requirements
---
core/admin/mailu/schemas.py | 20 ++++++--------------
core/admin/requirements-prod.txt | 1 +
core/admin/requirements.txt | 1 +
3 files changed, 8 insertions(+), 14 deletions(-)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index fdd766b3..28a5d6f4 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -21,16 +21,11 @@ from flask_marshmallow import Marshmallow
from OpenSSL import crypto
-try:
- from pygments import highlight
- from pygments.token import Token
- from pygments.lexers import get_lexer_by_name
- from pygments.lexers.data import YamlLexer
- from pygments.formatters import get_formatter_by_name
-except ModuleNotFoundError:
- COLOR_SUPPORTED = False
-else:
- COLOR_SUPPORTED = True
+from pygments import highlight
+from pygments.token import Token
+from pygments.lexers import get_lexer_by_name
+from pygments.lexers.data import YamlLexer
+from pygments.formatters import get_formatter_by_name
from mailu import models, dkim
@@ -92,10 +87,7 @@ class Logger:
self.debug = debug
self.print = print
- if want_color and not COLOR_SUPPORTED:
- raise ValueError('Please install pygments to colorize output')
-
- self.color = want_color or (can_color and COLOR_SUPPORTED)
+ self.color = want_color or can_color
self._counter = Counter()
self._schemas = {}
diff --git a/core/admin/requirements-prod.txt b/core/admin/requirements-prod.txt
index 7620fd95..5c291e24 100644
--- a/core/admin/requirements-prod.txt
+++ b/core/admin/requirements-prod.txt
@@ -36,6 +36,7 @@ marshmallow-sqlalchemy==0.24.1
passlib==1.7.4
psycopg2==2.8.2
pycparser==2.19
+Pygments==2.8.1
pyOpenSSL==19.0.0
python-dateutil==2.8.0
python-editor==1.0.4
diff --git a/core/admin/requirements.txt b/core/admin/requirements.txt
index f9d175b3..46500295 100644
--- a/core/admin/requirements.txt
+++ b/core/admin/requirements.txt
@@ -17,6 +17,7 @@ gunicorn
tabulate
PyYAML
PyOpenSSL
+Pygments
dnspython
bcrypt
tenacity
From 8bc44455721471a7277196190bb31db9d48d4bce Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Fri, 12 Mar 2021 17:56:17 +0100
Subject: [PATCH 070/181] Sync update of localpart, domain_name and email
---
core/admin/mailu/models.py | 18 +++++++++++++++---
core/admin/mailu/schemas.py | 4 ++--
2 files changed, 17 insertions(+), 5 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index b08c5bd2..93efd016 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -355,9 +355,21 @@ class Email(object):
self.localpart, self.domain_name = value.rsplit('@', 1)
self._email = value
- # hack for email declared attr - when _email is not updated yet
- def __str__(self):
- return str(f'{self.localpart}@{self.domain_name}')
+ @staticmethod
+ def _update_localpart(target, value, *_):
+ if target.domain_name:
+ target._email = f'{value}@{target.domain_name}'
+
+ @staticmethod
+ def _update_domain_name(target, value, *_):
+ if target.localpart:
+ target._email = f'{target.localpart}@{value}'
+
+ @classmethod
+ def __declare_last__(cls):
+ # gets called after mappings are completed
+ sqlalchemy.event.listen(User.localpart, 'set', cls._update_localpart, propagate=True)
+ sqlalchemy.event.listen(User.domain_name, 'set', cls._update_domain_name, propagate=True)
def sendmail(self, subject, body):
""" send an email to the address """
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 28a5d6f4..2742edf1 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -1030,8 +1030,8 @@ class BaseSchema(ma.SQLAlchemyAutoSchema, Storage):
self.opts.sqla_session.add(item)
return item
- # stop early if item has no password attribute
- if not hasattr(item, 'password'):
+ # stop early when not updating or item has no password attribute
+ if not self.context.get('update') or not hasattr(item, 'password'):
return item
# did we hash a new plaintext password?
From 83b1fbb9d67b6752a7d8bb4f8e41e3d35e56ed4d Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sun, 14 Mar 2021 18:09:21 +0100
Subject: [PATCH 071/181] Lazy loading of KVSessionExtension
- call cleanup_sessions on first kvstore access
  this allows running cmdline actions without redis (and makes it faster)
- Allow development using DictStore by setting REDIS_ADDRESS to the empty string in env
- don't sign the 64-bit random session id, as suggested by nextgens
---
core/admin/mailu/__init__.py | 12 +++---
core/admin/mailu/utils.py | 65 +++++++++++++++++++++++++++++++-
core/admin/requirements-prod.txt | 1 +
3 files changed, 70 insertions(+), 8 deletions(-)
diff --git a/core/admin/mailu/__init__.py b/core/admin/mailu/__init__.py
index f9ca2466..40cc9cff 100644
--- a/core/admin/mailu/__init__.py
+++ b/core/admin/mailu/__init__.py
@@ -1,8 +1,8 @@
+""" Mailu admin app
+"""
+
import flask
import flask_bootstrap
-import redis
-from flask_kvsession import KVSessionExtension
-from simplekv.memory.redisstore import RedisStore
from mailu import utils, debug, models, manage, configuration
@@ -20,7 +20,8 @@ def create_app_from_config(config):
# Initialize application extensions
config.init_app(app)
models.db.init_app(app)
- KVSessionExtension(RedisStore(redis.StrictRedis().from_url('redis://{0}/3'.format(config['REDIS_ADDRESS']))), app).cleanup_sessions(app)
+ utils.kvsession.init_kvstore(config)
+ utils.kvsession.init_app(app)
utils.limiter.init_app(app)
utils.babel.init_app(app)
utils.login.init_app(app)
@@ -53,8 +54,7 @@ def create_app_from_config(config):
def create_app():
- """ Create a new application based on the config module
+ """ Create a new application based on the config module
"""
config = configuration.ConfigManager()
return create_app_from_config(config)
-
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index ce12a09a..852fe8ad 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -1,11 +1,18 @@
-from mailu import models, limiter
+""" Mailu admin app utilities
+"""
+
+from mailu import limiter
import flask
import flask_login
-import flask_script
import flask_migrate
import flask_babel
+import flask_kvsession
+import redis
+from simplekv.memory import DictStore
+from simplekv.memory.redisstore import RedisStore
+from itsdangerous.encoding import want_bytes
from werkzeug.contrib import fixers
@@ -33,6 +40,10 @@ def get_locale():
# Proxy fixer
class PrefixMiddleware(object):
+ """ fix proxy headers """
+ def __init__(self):
+ self.app = None
+
def __call__(self, environ, start_response):
prefix = environ.get('HTTP_X_FORWARDED_PREFIX', '')
if prefix:
@@ -48,3 +59,53 @@ proxy = PrefixMiddleware()
# Data migrate
migrate = flask_migrate.Migrate()
+
+
+# session store
+class NullSigner(object):
+ """NullSigner does not sign nor unsign"""
+ def __init__(self, *args, **kwargs):
+ pass
+ def sign(self, value):
+ """Signs the given string."""
+ return want_bytes(value)
+ def unsign(self, signed_value):
+ """Unsigns the given string."""
+ return want_bytes(signed_value)
+
+    """ KVSession interface that runs an init function on first access """
+ """ KVSession interface allowing to run int function on first access """
+ def __init__(self, app, init_fn=None):
+ if init_fn:
+ app.kvsession_init = init_fn
+ else:
+ self._first_run(None)
+ def _first_run(self, app):
+ if app:
+ app.kvsession_init()
+ self.open_session = super().open_session
+ self.save_session = super().save_session
+ def open_session(self, app, request):
+ self._first_run(app)
+ return super().open_session(app, request)
+ def save_session(self, app, session, response):
+ self._first_run(app)
+ return super().save_session(app, session, response)
+
+class KVSessionExt(flask_kvsession.KVSessionExtension):
+ """ Activates Flask-KVSession for an application. """
+ def init_kvstore(self, config):
+ """ Initialize kvstore - fallback to DictStore without REDIS_ADDRESS """
+ if addr := config.get('REDIS_ADDRESS'):
+ self.default_kvstore = RedisStore(redis.StrictRedis().from_url(f'redis://{addr}/3'))
+ else:
+ self.default_kvstore = DictStore()
+
+ def init_app(self, app, session_kvstore=None):
+ """ Initialize application and KVSession. """
+ super().init_app(app, session_kvstore)
+ app.session_interface = KVSessionIntf(app, self.cleanup_sessions)
+
+kvsession = KVSessionExt()
+
+flask_kvsession.Signer = NullSigner
diff --git a/core/admin/requirements-prod.txt b/core/admin/requirements-prod.txt
index 54cf9a14..cd084684 100644
--- a/core/admin/requirements-prod.txt
+++ b/core/admin/requirements-prod.txt
@@ -39,6 +39,7 @@ python-editor==1.0.4
pytz==2019.1
PyYAML==5.1
redis==3.2.1
+simplekv==0.14.1
#alpine3:12 provides six==1.15.0
#six==1.12.0
socrate==0.1.1
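A minimal sketch of the store fallback introduced above (using the simplekv and redis packages added to the requirements; the config dicts are illustrative):

    import redis
    from simplekv.memory import DictStore
    from simplekv.memory.redisstore import RedisStore

    def pick_store(config):
        """ mirrors KVSessionExt.init_kvstore: redis db 3 when REDIS_ADDRESS is set, DictStore otherwise """
        if addr := config.get('REDIS_ADDRESS'):
            return RedisStore(redis.StrictRedis().from_url(f'redis://{addr}/3'))
        return DictStore()

    store = pick_store({'REDIS_ADDRESS': ''})        # development: in-memory sessions, no redis needed
    # store = pick_store({'REDIS_ADDRESS': 'redis'}) # production: sessions stored in redis db 3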
From f0f79b23a328064a787d99b3755d43a946a87d9b Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sun, 14 Mar 2021 21:38:16 +0100
Subject: [PATCH 072/181] Allow cleanup of sessions by key&value in data
This can be used to delete all sessions belonging to a user/login.
For now it just iterates over all sessions.
This could be enhanced by using a key prefix and deleting by prefix.
---
core/admin/mailu/utils.py | 32 +++++++++++++++++++++++++++++++-
1 file changed, 31 insertions(+), 1 deletion(-)
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index 852fe8ad..9394c38f 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -1,6 +1,8 @@
""" Mailu admin app utilities
"""
+from datetime import datetime
+
from mailu import limiter
import flask
@@ -22,6 +24,7 @@ login.login_view = "ui.login"
@login.unauthorized_handler
def handle_needs_login():
+ """ redirect unauthorized requests to login page """
return flask.redirect(
flask.url_for('ui.login', next=flask.request.endpoint)
)
@@ -34,7 +37,8 @@ babel = flask_babel.Babel()
@babel.localeselector
def get_locale():
- translations = list(map(str, babel.list_translations()))
+ """ selects locale for translation """
+ translations = [str(translation) for translation in babel.list_translations()]
return flask.request.accept_languages.best_match(translations)
@@ -101,6 +105,32 @@ class KVSessionExt(flask_kvsession.KVSessionExtension):
else:
self.default_kvstore = DictStore()
+ def cleanup_sessions(self, app=None, dkey=None, dvalue=None):
+ """ Remove sessions from the store. """
+ if not app:
+ app = flask.current_app
+ if dkey is None and dvalue is None:
+ now = datetime.utcnow()
+ for key in app.kvsession_store.keys():
+ try:
+ sid = flask_kvsession.SessionID.unserialize(key)
+ except ValueError:
+ pass
+ else:
+ if sid.has_expired(
+ app.config['PERMANENT_SESSION_LIFETIME'],
+ now
+ ):
+ app.kvsession_store.delete(key)
+ elif dkey is not None and dvalue is not None:
+ for key in app.kvsession_store.keys():
+ if app.session_interface.serialization_method.loads(
+ app.kvsession_store.get(key)
+ ).get(dkey, None) == dvalue:
+ app.kvsession_store.delete(key)
+ else:
+ raise ValueError('Need dkey and dvalue.')
+
def init_app(self, app, session_kvstore=None):
""" Initialize application and KVSession. """
super().init_app(app, session_kvstore)
From 4b71bd56c49dceef38f5cc4267d22936ecaf0bc4 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sun, 4 Apr 2021 14:35:31 +0200
Subject: [PATCH 073/181] replace flask_kvsession with mailu's own storage
---
core/admin/mailu/__init__.py | 3 +-
core/admin/mailu/configuration.py | 5 +-
core/admin/mailu/utils.py | 408 +++++++++++++++++++++++++-----
core/admin/requirements-prod.txt | 2 -
core/admin/requirements.txt | 1 -
5 files changed, 343 insertions(+), 76 deletions(-)
diff --git a/core/admin/mailu/__init__.py b/core/admin/mailu/__init__.py
index 40cc9cff..690837ea 100644
--- a/core/admin/mailu/__init__.py
+++ b/core/admin/mailu/__init__.py
@@ -20,8 +20,7 @@ def create_app_from_config(config):
# Initialize application extensions
config.init_app(app)
models.db.init_app(app)
- utils.kvsession.init_kvstore(config)
- utils.kvsession.init_app(app)
+ utils.session.init_app(app)
utils.limiter.init_app(app)
utils.babel.init_app(app)
utils.login.init_app(app)
diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py
index 6f65d17d..3d1b4fb5 100644
--- a/core/admin/mailu/configuration.py
+++ b/core/admin/mailu/configuration.py
@@ -14,6 +14,7 @@ DEFAULT_CONFIG = {
'DEBUG': False,
'DOMAIN_REGISTRATION': False,
'TEMPLATES_AUTO_RELOAD': True,
+ 'MEMORY_SESSIONS': False,
# Database settings
'DB_FLAVOR': None,
'DB_USER': 'mailu',
@@ -55,6 +56,7 @@ DEFAULT_CONFIG = {
'RECAPTCHA_PRIVATE_KEY': '',
# Advanced settings
'LOG_LEVEL': 'WARNING',
+ 'SESSION_KEY_BITS': 128,
'SESSION_LIFETIME': 24,
'SESSION_COOKIE_SECURE': True,
'CREDENTIAL_ROUNDS': 12,
@@ -65,7 +67,6 @@ DEFAULT_CONFIG = {
'HOST_SMTP': 'smtp',
'HOST_AUTHSMTP': 'smtp',
'HOST_ADMIN': 'admin',
- 'WEBMAIL': 'none',
'HOST_WEBMAIL': 'webmail',
'HOST_WEBDAV': 'webdav:5232',
'HOST_REDIS': 'redis',
@@ -136,9 +137,9 @@ class ConfigManager(dict):
self.config['RATELIMIT_STORAGE_URL'] = 'redis://{0}/2'.format(self.config['REDIS_ADDRESS'])
self.config['QUOTA_STORAGE_URL'] = 'redis://{0}/1'.format(self.config['REDIS_ADDRESS'])
+ self.config['SESSION_STORAGE_URL'] = 'redis://{0}/3'.format(self.config['REDIS_ADDRESS'])
self.config['SESSION_COOKIE_SAMESITE'] = 'Strict'
self.config['SESSION_COOKIE_HTTPONLY'] = True
- self.config['SESSION_KEY_BITS'] = 128
self.config['PERMANENT_SESSION_LIFETIME'] = timedelta(hours=int(self.config['SESSION_LIFETIME']))
# update the app config itself
app.config = self
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index 9394c38f..1deaa4ae 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -1,7 +1,14 @@
""" Mailu admin app utilities
"""
-from datetime import datetime
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+import hashlib
+import secrets
+import time
from mailu import limiter
@@ -9,12 +16,11 @@ import flask
import flask_login
import flask_migrate
import flask_babel
-import flask_kvsession
import redis
-from simplekv.memory import DictStore
-from simplekv.memory.redisstore import RedisStore
+from flask.sessions import SessionMixin, SessionInterface
from itsdangerous.encoding import want_bytes
+from werkzeug.datastructures import CallbackDict
from werkzeug.contrib import fixers
@@ -65,77 +71,341 @@ proxy = PrefixMiddleware()
migrate = flask_migrate.Migrate()
-# session store
-class NullSigner(object):
- """NullSigner does not sign nor unsign"""
- def __init__(self, *args, **kwargs):
- pass
- def sign(self, value):
- """Signs the given string."""
- return want_bytes(value)
- def unsign(self, signed_value):
- """Unsigns the given string."""
- return want_bytes(signed_value)
+# session store (inspired by https://github.com/mbr/flask-kvsession)
+class RedisStore:
+ """ Stores session data in a redis db. """
-class KVSessionIntf(flask_kvsession.KVSessionInterface):
- """ KVSession interface allowing to run int function on first access """
- def __init__(self, app, init_fn=None):
- if init_fn:
- app.kvsession_init = init_fn
+ has_ttl = True
+
+ def __init__(self, redisstore):
+ self.redis = redisstore
+
+ def get(self, key):
+ """ load item from store. """
+ value = self.redis.get(key)
+ if value is None:
+ raise KeyError(key)
+ return value
+
+ def put(self, key, value, ttl_secs=None):
+ """ save item to store. """
+ if ttl_secs:
+ self.redis.setex(key, int(ttl_secs), value)
else:
- self._first_run(None)
- def _first_run(self, app):
- if app:
- app.kvsession_init()
- self.open_session = super().open_session
- self.save_session = super().save_session
- def open_session(self, app, request):
- self._first_run(app)
- return super().open_session(app, request)
- def save_session(self, app, session, response):
- self._first_run(app)
- return super().save_session(app, session, response)
+ self.redis.set(key, value)
-class KVSessionExt(flask_kvsession.KVSessionExtension):
- """ Activates Flask-KVSession for an application. """
- def init_kvstore(self, config):
- """ Initialize kvstore - fallback to DictStore without REDIS_ADDRESS """
- if addr := config.get('REDIS_ADDRESS'):
- self.default_kvstore = RedisStore(redis.StrictRedis().from_url(f'redis://{addr}/3'))
- else:
- self.default_kvstore = DictStore()
+ def delete(self, key):
+ """ delete item from store. """
+ self.redis.delete(key)
- def cleanup_sessions(self, app=None, dkey=None, dvalue=None):
- """ Remove sessions from the store. """
- if not app:
+ def list(self, prefix=None):
+ """ return list of keys starting with prefix """
+ if prefix:
+ prefix += b'*'
+ return list(self.redis.scan_iter(match=prefix))
+
+class DictStore:
+ """ Stores session data in a python dict. """
+
+ has_ttl = False
+
+ def __init__(self):
+ self.dict = {}
+
+ def get(self, key):
+ """ load item from store. """
+ return self.dict[key]
+
+ def put(self, key, value, ttl_secs=None):
+ """ save item to store. """
+ self.dict[key] = value
+
+ def delete(self, key):
+ """ delete item from store. """
+ try:
+ del self.dict[key]
+ except KeyError:
+ pass
+
+ def list(self, prefix=None):
+ """ return list of keys starting with prefix """
+ if prefix is None:
+ return list(self.dict.keys())
+ return [key for key in self.dict if key.startswith(prefix)]
+
+class MailuSession(CallbackDict, SessionMixin):
+ """ Custom flask session storage. """
+
+ # default modified to false
+ modified = False
+
+ def __init__(self, key=None, app=None):
+
+ self.app = app or flask.current_app
+
+ initial = None
+
+ key = want_bytes(key)
+ if parsed := self.app.session_config.parse_key(key, self.app):
+ try:
+ initial = pickle.loads(app.session_store.get(key))
+ except (KeyError, EOFError, pickle.UnpicklingError):
+ # either the cookie was manipulated or we did not find the
+ # session in the backend or the pickled data is invalid.
+ # => start new session
+ pass
+ else:
+ (self._uid, self._sid, self._created) = parsed
+ self._key = key
+
+ if initial is None:
+ # start new session
+ self.new = True
+ self._uid = None
+ self._sid = None
+ self._created = self.app.session_config.gen_created()
+ self._key = None
+
+ def _on_update(obj):
+ obj.modified = True
+
+ CallbackDict.__init__(self, initial, _on_update)
+
+ @property
+ def sid(self):
+ """ this reflects the session's id. """
+ if self._sid is None or self._uid is None or self._created is None:
+ return None
+ return b''.join([self._uid, self._sid, self._created])
+
+ def destroy(self):
+ """ destroy session for security reasons. """
+
+ if self._key is not None:
+ self.app.session_store.delete(self._key)
+ self._key = None
+
+ self._uid = None
+ self._sid = None
+ self._created = None
+
+ self.clear()
+
+ self.modified = False
+ self.new = False
+
+ def regenerate(self):
+ """ generate new id for session to avoid `session fixation`. """
+
+ if self._key is not None:
+ self.app.session_store.delete(self._key)
+ self._key = None
+
+ self._sid = None
+ self._created = self.app.session_config.gen_created()
+
+ self.modified = True
+
+ def save(self):
+ """ Save session to store. """
+
+ # don't save if session was destroyed or is not modified
+ if self._created is None or not self.modified:
+ return False
+
+ # set uid from dict data
+ if self._uid is None:
+ self._uid = self.app.session_config.gen_uid(self.get('user_id', ''))
+
+ # create new session id for new or regenerated sessions
+ if self._sid is None:
+ self._sid = self.app.session_config.gen_sid()
+
+ # set created if permanent state changed
+ if self.permanent:
+ if self._created:
+ self._created = b''
+ elif not self._created:
+ self._created = self.app.session_config.gen_created()
+
+ # get new session key
+ key = self.sid
+
+ # delete old session if key has changed
+ if key != self._key and self._key is not None:
+ self.app.session_store.delete(self._key)
+
+ # save session
+ self.app.session_store.put(
+ key,
+ pickle.dumps(dict(self)),
+ None if self.permanent else self.app.permanent_session_lifetime.total_seconds()
+ )
+
+ self._key = key
+
+ self.new = False
+ self.modified = False
+
+ return True
+
+class MailuSessionConfig:
+ """ Stores sessions crypto config """
+
+ def __init__(self, app=None):
+
+ if app is None:
app = flask.current_app
- if dkey is None and dvalue is None:
- now = datetime.utcnow()
- for key in app.kvsession_store.keys():
- try:
- sid = flask_kvsession.SessionID.unserialize(key)
- except ValueError:
- pass
- else:
- if sid.has_expired(
- app.config['PERMANENT_SESSION_LIFETIME'],
- now
- ):
- app.kvsession_store.delete(key)
- elif dkey is not None and dvalue is not None:
- for key in app.kvsession_store.keys():
- if app.session_interface.serialization_method.loads(
- app.kvsession_store.get(key)
- ).get(dkey, None) == dvalue:
- app.kvsession_store.delete(key)
+
+ bits = app.config.get('SESSION_KEY_BITS', 64)
+
+ if bits < 64:
+ raise ValueError('Session id entropy must not be less than 64 bits!')
+
+ hash_bytes = bits//8 + (bits%8>0)
+ time_bytes = 4 # 32 bit timestamp for now
+
+ self._shake_fn = hashlib.shake_256 if bits>128 else hashlib.shake_128
+ self._hash_len = hash_bytes
+ self._hash_b64 = len(self._encode(bytes(hash_bytes)))
+ self._key_min = 2*self._hash_b64
+ self._key_max = self._key_min + len(self._encode(bytes(time_bytes)))
+
+ def gen_sid(self):
+ """ Generate random session id. """
+ return self._encode(secrets.token_bytes(self._hash_len))
+
+ def gen_uid(self, uid):
+ """ Generate hashed user id part of session key. """
+ return self._encode(self._shake_fn(want_bytes(uid)).digest(self._hash_len))
+
+ def gen_created(self, now=None):
+ """ Generate base64 representation of creation time. """
+ return self._encode(int(now or time.time()).to_bytes(8, byteorder='big').lstrip(b'\0'))
+
+ def parse_key(self, key, app=None, now=None):
+ """ Split key into sid, uid and creation time. """
+
+ if not (isinstance(key, bytes) and self._key_min <= len(key) <= self._key_max):
+ return None
+
+ uid = key[:self._hash_b64]
+ sid = key[self._hash_b64:self._key_min]
+ crt = key[self._key_min:]
+
+ # validate if parts are decodeable
+ created = self._decode(crt)
+ if created is None or self._decode(uid) is None or self._decode(sid) is None:
+ return None
+
+ # validate creation time when requested or store does not support ttl
+ if now is not None or not app.session_store.has_ttl:
+ created = int.from_bytes(created, byteorder='big')
+ if created > 0:
+ if now is None:
+ now = int(time.time())
+ if created < now < created + app.permanent_session_lifetime.total_seconds():
+ return None
+
+ return (uid, sid, crt)
+
+ def _encode(self, value):
+ return secrets.base64.urlsafe_b64encode(value).rstrip(b'=')
+
+ def _decode(self, value):
+ try:
+ return secrets.base64.urlsafe_b64decode(value + b'='*(4-len(value)%4))
+ except secrets.binascii.Error:
+ return None
+
+class MailuSessionInterface(SessionInterface):
+ """ Custom flask session interface. """
+
+ def open_session(self, app, request):
+ """ Load or create session. """
+ return MailuSession(request.cookies.get(app.config['SESSION_COOKIE_NAME'], None), app)
+
+ def save_session(self, app, session, response):
+ """ Save modified session. """
+
+ if session.save():
+ # session saved. update cookie
+ response.set_cookie(
+ key=app.config['SESSION_COOKIE_NAME'],
+ value=session.sid,
+ expires=self.get_expiration_time(app, session),
+ path=self.get_cookie_path(app),
+ domain=self.get_cookie_domain(app),
+ secure=app.config['SESSION_COOKIE_SECURE'],
+ httponly=app.config['SESSION_COOKIE_HTTPONLY']
+ )
+
+class MailuSessionExtension:
+ """ Server side session handling """
+
+ @staticmethod
+ def cleanup_sessions(app=None):
+ """ Remove invalid or expired sessions. """
+
+ app = app or flask.current_app
+ now = int(time.time())
+
+ count = 0
+ for key in app.session_store.list():
+ if not app.session_config.parse_key(key, app, now):
+ app.session_store.delete(key)
+ count += 1
+
+ return count
+
+ @staticmethod
+ def prune_sessions(uid=None, keep_permanent=False, keep=None, app=None):
+ """ Remove sessions
+ uid: remove all sessions (NONE) or sessions belonging to a specific user
+ keep_permanent: also delete permanent sessions?
+ keep: keep listed sessions
+ """
+
+ keep = keep or set()
+ app = app or flask.current_app
+ now = int(time.time())
+
+ prefix = None if uid is None else app.session_config.gen_uid(uid)
+
+ count = 0
+ for key in app.session_store.list(prefix):
+ if key in keep:
+ continue
+ if keep_permanent:
+ if parsed := app.session_config.parse_key(key, app, now):
+ if not parsed[2]:
+ continue
+ app.session_store.delete(key)
+ count += 1
+
+ return count
+
+ def init_app(self, app):
+ """ Replace session management of application. """
+
+ if app.config.get('MEMORY_SESSIONS'):
+ # in-memory session store for use in development
+ app.session_store = DictStore()
+
else:
- raise ValueError('Need dkey and dvalue.')
+ # redis-based session store for use in production
+ app.session_store = RedisStore(
+ redis.StrictRedis().from_url(app.config['SESSION_STORAGE_URL'])
+ )
- def init_app(self, app, session_kvstore=None):
- """ Initialize application and KVSession. """
- super().init_app(app, session_kvstore)
- app.session_interface = KVSessionIntf(app, self.cleanup_sessions)
+ # clean expired sessions on first use in case lifetime was changed
+ def cleaner():
+ MailuSessionExtension.cleanup_sessions(app)
-kvsession = KVSessionExt()
+ # TODO: hmm. this will clean once per gunicorn worker
+ app.before_first_request(cleaner)
-flask_kvsession.Signer = NullSigner
+ app.session_config = MailuSessionConfig(app)
+ app.session_interface = MailuSessionInterface()
+
+session = MailuSessionExtension()
diff --git a/core/admin/requirements-prod.txt b/core/admin/requirements-prod.txt
index cd084684..f767f431 100644
--- a/core/admin/requirements-prod.txt
+++ b/core/admin/requirements-prod.txt
@@ -13,7 +13,6 @@ Flask==1.0.2
Flask-Babel==0.12.2
Flask-Bootstrap==3.3.7.1
Flask-DebugToolbar==0.10.1
-Flask-KVSession==0.6.2
Flask-Limiter==1.0.1
Flask-Login==0.4.1
Flask-Migrate==2.4.0
@@ -39,7 +38,6 @@ python-editor==1.0.4
pytz==2019.1
PyYAML==5.1
redis==3.2.1
-simplekv==0.14.1
#alpine3:12 provides six==1.15.0
#six==1.12.0
socrate==0.1.1
diff --git a/core/admin/requirements.txt b/core/admin/requirements.txt
index abb37234..9739ed3f 100644
--- a/core/admin/requirements.txt
+++ b/core/admin/requirements.txt
@@ -3,7 +3,6 @@ Flask-Login
Flask-SQLAlchemy
Flask-bootstrap
Flask-Babel
-Flask-KVSession
Flask-migrate
Flask-script
Flask-wtf
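Note (not part of the patch): a self-contained sketch of the session key layout this patch introduces, a hashed uid, a random sid and a creation timestamp, each base64url-encoded without padding and concatenated. SESSION_KEY_BITS=128 and the example address are assumptions.

    # hedged sketch of the key layout: <uid-hash><random-sid><created>, base64url without padding
    import base64, hashlib, secrets, time

    def b64(value: bytes) -> bytes:
        return base64.urlsafe_b64encode(value).rstrip(b'=')

    hash_len = 128 // 8  # SESSION_KEY_BITS=128 -> 16 bytes per part
    uid = b64(hashlib.shake_128(b'admin@example.com').digest(hash_len))
    sid = b64(secrets.token_bytes(hash_len))
    created = b64(int(time.time()).to_bytes(8, 'big').lstrip(b'\0'))

    key = uid + sid + created  # this value ends up in redis and in the session cookie
    print(key)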
From 4b8bbf760b423657a564b478cf72cbbc0125e7c3 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sun, 4 Apr 2021 14:40:49 +0200
Subject: [PATCH 074/181] default to 128 bits
---
core/admin/mailu/configuration.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py
index 3d1b4fb5..679c6c7e 100644
--- a/core/admin/mailu/configuration.py
+++ b/core/admin/mailu/configuration.py
@@ -140,6 +140,7 @@ class ConfigManager(dict):
self.config['SESSION_STORAGE_URL'] = 'redis://{0}/3'.format(self.config['REDIS_ADDRESS'])
self.config['SESSION_COOKIE_SAMESITE'] = 'Strict'
self.config['SESSION_COOKIE_HTTPONLY'] = True
+ self.config['SESSION_KEY_BITS'] = 128
self.config['PERMANENT_SESSION_LIFETIME'] = timedelta(hours=int(self.config['SESSION_LIFETIME']))
# update the app config itself
app.config = self
From 731ce8ede91a7c3e37c8efc0161c0e6b06d330fc Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sun, 4 Apr 2021 18:02:43 +0200
Subject: [PATCH 075/181] fix permanent sessions. hash uid using SECRET_KEY
clean sessions in redis only once when starting
---
core/admin/mailu/utils.py | 140 +++++++++++++++++++++-----------------
1 file changed, 79 insertions(+), 61 deletions(-)
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index 1deaa4ae..30725ff7 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -10,6 +10,8 @@ import hashlib
import secrets
import time
+from multiprocessing import Value
+
from mailu import limiter
import flask
@@ -87,10 +89,10 @@ class RedisStore:
raise KeyError(key)
return value
- def put(self, key, value, ttl_secs=None):
+ def put(self, key, value, ttl=None):
""" save item to store. """
- if ttl_secs:
- self.redis.setex(key, int(ttl_secs), value)
+ if ttl:
+ self.redis.setex(key, int(ttl), value)
else:
self.redis.set(key, value)
@@ -171,6 +173,11 @@ class MailuSession(CallbackDict, SessionMixin):
CallbackDict.__init__(self, initial, _on_update)
+ @property
+ def saved(self):
+ """ this reflects if the session was saved. """
+ return self._key is not None
+
@property
def sid(self):
""" this reflects the session's id. """
@@ -181,9 +188,7 @@ class MailuSession(CallbackDict, SessionMixin):
def destroy(self):
""" destroy session for security reasons. """
- if self._key is not None:
- self.app.session_store.delete(self._key)
- self._key = None
+ self.delete()
self._uid = None
self._sid = None
@@ -191,28 +196,28 @@ class MailuSession(CallbackDict, SessionMixin):
self.clear()
- self.modified = False
+ self.modified = True
self.new = False
def regenerate(self):
""" generate new id for session to avoid `session fixation`. """
- if self._key is not None:
- self.app.session_store.delete(self._key)
- self._key = None
+ self.delete()
self._sid = None
self._created = self.app.session_config.gen_created()
self.modified = True
+ def delete(self):
+ """ Delete stored session. """
+ if self.saved:
+ self.app.session_store.delete(self._key)
+ self._key = None
+
def save(self):
""" Save session to store. """
- # don't save if session was destroyed or is not modified
- if self._created is None or not self.modified:
- return False
-
# set uid from dict data
if self._uid is None:
self._uid = self.app.session_config.gen_uid(self.get('user_id', ''))
@@ -221,25 +226,18 @@ class MailuSession(CallbackDict, SessionMixin):
if self._sid is None:
self._sid = self.app.session_config.gen_sid()
- # set created if permanent state changed
- if self.permanent:
- if self._created:
- self._created = b''
- elif not self._created:
- self._created = self.app.session_config.gen_created()
-
# get new session key
key = self.sid
# delete old session if key has changed
- if key != self._key and self._key is not None:
- self.app.session_store.delete(self._key)
+ if key != self._key:
+ self.delete()
# save session
self.app.session_store.put(
key,
pickle.dumps(dict(self)),
- None if self.permanent else self.app.permanent_session_lifetime.total_seconds()
+ self.app.permanent_session_lifetime.total_seconds()
)
self._key = key
@@ -247,8 +245,6 @@ class MailuSession(CallbackDict, SessionMixin):
self.new = False
self.modified = False
- return True
-
class MailuSessionConfig:
""" Stores sessions crypto config """
@@ -264,8 +260,9 @@ class MailuSessionConfig:
hash_bytes = bits//8 + (bits%8>0)
time_bytes = 4 # 32 bit timestamp for now
+ shaker = hashlib.shake_256 if bits>128 else hashlib.shake_128
- self._shake_fn = hashlib.shake_256 if bits>128 else hashlib.shake_128
+ self._shaker = shaker(want_bytes(app.config.get('SECRET_KEY', '')))
self._hash_len = hash_bytes
self._hash_b64 = len(self._encode(bytes(hash_bytes)))
self._key_min = 2*self._hash_b64
@@ -277,13 +274,15 @@ class MailuSessionConfig:
def gen_uid(self, uid):
""" Generate hashed user id part of session key. """
- return self._encode(self._shake_fn(want_bytes(uid)).digest(self._hash_len))
+ shaker = self._shaker.copy()
+ shaker.update(want_bytes(uid))
+ return self._encode(shaker.digest(self._hash_len))
def gen_created(self, now=None):
""" Generate base64 representation of creation time. """
return self._encode(int(now or time.time()).to_bytes(8, byteorder='big').lstrip(b'\0'))
- def parse_key(self, key, app=None, now=None):
+ def parse_key(self, key, app=None, validate=False, now=None):
""" Split key into sid, uid and creation time. """
if not (isinstance(key, bytes) and self._key_min <= len(key) <= self._key_max):
@@ -299,13 +298,12 @@ class MailuSessionConfig:
return None
# validate creation time when requested or store does not support ttl
- if now is not None or not app.session_store.has_ttl:
+ if validate or not app.session_store.has_ttl:
+ if now is None:
+ now = int(time.time())
created = int.from_bytes(created, byteorder='big')
- if created > 0:
- if now is None:
- now = int(time.time())
- if created < now < created + app.permanent_session_lifetime.total_seconds():
- return None
+ if not (created < now < created + app.permanent_session_lifetime.total_seconds()):
+ return None
return (uid, sid, crt)
@@ -328,17 +326,40 @@ class MailuSessionInterface(SessionInterface):
def save_session(self, app, session, response):
""" Save modified session. """
- if session.save():
- # session saved. update cookie
- response.set_cookie(
- key=app.config['SESSION_COOKIE_NAME'],
- value=session.sid,
- expires=self.get_expiration_time(app, session),
- path=self.get_cookie_path(app),
- domain=self.get_cookie_domain(app),
- secure=app.config['SESSION_COOKIE_SECURE'],
- httponly=app.config['SESSION_COOKIE_HTTPONLY']
- )
+ # If the session is modified to be empty, remove the cookie.
+ # If the session is empty, return without setting the cookie.
+ if not session:
+ if session.modified:
+ session.delete()
+ response.delete_cookie(
+ app.session_cookie_name,
+ domain=self.get_cookie_domain(app),
+ path=self.get_cookie_path(app),
+ )
+ return
+
+ # Add a "Vary: Cookie" header if the session was accessed
+ if session.accessed:
+ response.vary.add('Cookie')
+
+ # TODO: set cookie from time to time to prevent expiration in browser
+ # also update expire in redis
+
+ if not self.should_set_cookie(app, session):
+ return
+
+ # save session and update cookie
+ session.save()
+ response.set_cookie(
+ app.session_cookie_name,
+ session.sid,
+ expires=self.get_expiration_time(app, session),
+ httponly=self.get_cookie_httponly(app),
+ domain=self.get_cookie_domain(app),
+ path=self.get_cookie_path(app),
+ secure=self.get_cookie_secure(app),
+ samesite=self.get_cookie_samesite(app)
+ )
class MailuSessionExtension:
""" Server side session handling """
@@ -352,36 +373,29 @@ class MailuSessionExtension:
count = 0
for key in app.session_store.list():
- if not app.session_config.parse_key(key, app, now):
+ if not app.session_config.parse_key(key, app, validate=True, now=now):
app.session_store.delete(key)
count += 1
return count
@staticmethod
- def prune_sessions(uid=None, keep_permanent=False, keep=None, app=None):
+ def prune_sessions(uid=None, keep=None, app=None):
""" Remove sessions
uid: remove all sessions (NONE) or sessions belonging to a specific user
- keep_permanent: also delete permanent sessions?
keep: keep listed sessions
"""
keep = keep or set()
app = app or flask.current_app
- now = int(time.time())
prefix = None if uid is None else app.session_config.gen_uid(uid)
count = 0
for key in app.session_store.list(prefix):
- if key in keep:
- continue
- if keep_permanent:
- if parsed := app.session_config.parse_key(key, app, now):
- if not parsed[2]:
- continue
- app.session_store.delete(key)
- count += 1
+ if key not in keep:
+ app.session_store.delete(key)
+ count += 1
return count
@@ -398,14 +412,18 @@ class MailuSessionExtension:
redis.StrictRedis().from_url(app.config['SESSION_STORAGE_URL'])
)
- # clean expired sessions on first use in case lifetime was changed
+    # clean expired sessions once on first use in case lifetime was changed
def cleaner():
- MailuSessionExtension.cleanup_sessions(app)
+ with cleaned.get_lock():
+ if not cleaned.value:
+ cleaned.value = True
+ flask.current_app.logger.error('cleaning')
+ MailuSessionExtension.cleanup_sessions(app)
- # TODO: hmm. this will clean once per gunicorn worker
app.before_first_request(cleaner)
app.session_config = MailuSessionConfig(app)
app.session_interface = MailuSessionInterface()
+cleaned = Value('i', False)
session = MailuSessionExtension()
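Note (not part of the patch): a hedged sketch of the copy-and-update pattern gen_uid() now uses, hash SECRET_KEY once, then derive per-user digests from copies of that keyed state. The SECRET_KEY value and the hashed_uid name are illustrative.

    # hedged sketch of the keyed uid hash: feed SECRET_KEY once, copy the state per user
    import hashlib

    SECRET_KEY = 'changeMe'  # stand-in for app.config['SECRET_KEY']
    base = hashlib.shake_128(SECRET_KEY.encode())  # hashed once at startup

    def hashed_uid(user_id: str, length: int = 16) -> bytes:
        shaker = base.copy()  # cheap copy of the already-keyed state
        shaker.update(user_id.encode())
        return shaker.digest(length)

    assert hashed_uid('admin@example.com') != hashed_uid('other@example.com')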
From 1f2aee278c1a2dc1ab0d232433f448264a29ba9f Mon Sep 17 00:00:00 2001
From: Linus Gasser
Date: Thu, 13 May 2021 18:59:18 +0200
Subject: [PATCH 076/181] Reflect override settings for postfix
Also added a note about a common pitfall when changing postfix.cf
---
docs/faq.rst | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/docs/faq.rst b/docs/faq.rst
index 9c4f1d75..5b13f191 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -258,9 +258,11 @@ Postfix, Dovecot, Nginx and Rspamd support overriding configuration files. Overr
correct syntax. The following file names will be taken as override configuration:
- `Postfix`_ :
- - ``main.cf`` as ``$ROOT/overrides/postfix/postfix.cf``
- - ``master.cf`` as ``$ROOT/overrides/postfix/postfix.master``
+ - ``main.cf`` as ``$ROOT/overrides/postfix.cf``
+ - ``master.cf`` as ``$ROOT/overrides/postfix.master``
- All ``$ROOT/overrides/postfix/*.map`` files
+ - For both ``postfix.cf`` and ``postfix.master``, you need to put one configuration per line, as they are fed line-by-line
+ to postfix.
- `Dovecot`_ - ``dovecot.conf`` in dovecot sub-directory;
- `Nginx`_ - All ``*.conf`` files in the ``nginx`` sub-directory;
- `Rspamd`_ - All files in the ``rspamd`` sub-directory.
From ae9206e968b10de6c456a76f914f8d86dff02f6f Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Wed, 10 Feb 2021 13:51:07 +0100
Subject: [PATCH 077/181] Implement a simple credential cache
---
core/admin/mailu/models.py | 23 +++++++++++++++++++++++
towncrier/newsfragments/1194.feature | 1 +
2 files changed, 24 insertions(+)
create mode 100644 towncrier/newsfragments/1194.feature
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index a63c33a5..c7787e74 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -305,6 +305,7 @@ class User(Base, Email):
"""
__tablename__ = "user"
_ctx = None
+ _credential_cache = {}
domain = db.relationship(Domain,
backref=db.backref('users', cascade='all, delete-orphan'))
@@ -382,6 +383,17 @@ class User(Base, Email):
return User._ctx
def check_password(self, password):
+ cache_result = self._credential_cache.get(self.get_id())
+ current_salt = self.password.split('$')[3] if len(self.password.split('$')) == 5 else None
+ if cache_result and current_salt:
+ cache_salt, cache_hash = cache_result
+ if cache_salt == current_salt:
+ return hash.pbkdf2_sha256.verify(password, cache_hash)
+ else:
+ # the cache is local per gunicorn; the password has changed
+ # so the local cache can be invalidated
+ del self._credential_cache[self.get_id()]
+
reference = self.password
# strip {scheme} if that's something mailu has added
# passlib will identify *crypt based hashes just fine
@@ -396,6 +408,17 @@ class User(Base, Email):
self.password = new_hash
db.session.add(self)
db.session.commit()
+
+ if result:
+ """The credential cache uses a low number of rounds to be fast.
+While it's not meant to be persisted to cold-storage, no additional measures
+are taken to ensure it isn't (mlock(), encrypted swap, ...) on the basis that
+we have little control over GC and string interning anyways.
+
+ An attacker that can dump the process' memory is likely to find credentials
+in clear-text regardless of the presence of the cache.
+ """
+ self._credential_cache[self.get_id()] = (self.password.split('$')[3], hash.pbkdf2_sha256.using(rounds=1).hash(password))
return result
def set_password(self, password, hash_scheme=None, raw=False):
diff --git a/towncrier/newsfragments/1194.feature b/towncrier/newsfragments/1194.feature
new file mode 100644
index 00000000..0cd2a9e9
--- /dev/null
+++ b/towncrier/newsfragments/1194.feature
@@ -0,0 +1 @@
+Add a credential cache to speed up authentication requests.
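Note (not part of the patch): a hedged, standalone sketch of the cache flow above, assuming passlib's $pbkdf2-sha256$rounds$salt$checksum format (five '$'-separated fields, salt at index 3) and a module-level dict as the per-worker cache; check_password_cached is a hypothetical name.

    # hedged sketch of the credential cache: verify against a cheap 1-round hash while the
    # stored salt still matches, otherwise do the full verification and repopulate the cache
    from passlib import hash as passlib_hash

    _credential_cache = {}  # per-process cache: uid -> (salt, 1-round hash)

    def check_password_cached(uid, password, stored_hash):
        parts = stored_hash.split('$')
        salt = parts[3] if len(parts) == 5 else None
        cached = _credential_cache.get(uid)
        if cached and salt and cached[0] == salt:
            # fast path: salt unchanged, verify against the cheap cached hash
            return passlib_hash.pbkdf2_sha256.verify(password, cached[1])
        # slow path: full verification, then repopulate the cache on success
        if passlib_hash.pbkdf2_sha256.verify(password, stored_hash):
            _credential_cache[uid] = (salt, passlib_hash.pbkdf2_sha256.using(rounds=1).hash(password))
            return True
        return False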
From f52984e4c337e5a101aea82a0654fd731ab94164 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Wed, 10 Feb 2021 16:10:10 +0100
Subject: [PATCH 078/181] In fact it could be global
---
core/admin/mailu/models.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index c7787e74..a5ee1f57 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -17,7 +17,7 @@ import dns
db = flask_sqlalchemy.SQLAlchemy()
-
+_credential_cache = {}
class IdnaDomain(db.TypeDecorator):
""" Stores a Unicode string in it's IDNA representation (ASCII only)
@@ -383,7 +383,7 @@ class User(Base, Email):
return User._ctx
def check_password(self, password):
- cache_result = self._credential_cache.get(self.get_id())
+ cache_result = _credential_cache.get(self.get_id())
current_salt = self.password.split('$')[3] if len(self.password.split('$')) == 5 else None
if cache_result and current_salt:
cache_salt, cache_hash = cache_result
@@ -392,7 +392,7 @@ class User(Base, Email):
else:
# the cache is local per gunicorn; the password has changed
# so the local cache can be invalidated
- del self._credential_cache[self.get_id()]
+ del _credential_cache[self.get_id()]
reference = self.password
# strip {scheme} if that's something mailu has added
@@ -418,7 +418,7 @@ we have little control over GC and string interning anyways.
An attacker that can dump the process' memory is likely to find credentials
in clear-text regardless of the presence of the cache.
"""
- self._credential_cache[self.get_id()] = (self.password.split('$')[3], hash.pbkdf2_sha256.using(rounds=1).hash(password))
+ _credential_cache[self.get_id()] = (self.password.split('$')[3], hash.pbkdf2_sha256.using(rounds=1).hash(password))
return result
def set_password(self, password, hash_scheme=None, raw=False):
From 875308d40518dde4000a4f09286059e07f36de71 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Fri, 4 Jun 2021 09:51:58 +0200
Subject: [PATCH 079/181] Revert "In fact it could be global"
This reverts commit f52984e4c337e5a101aea82a0654fd731ab94164.
---
core/admin/mailu/models.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index a5ee1f57..c7787e74 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -17,7 +17,7 @@ import dns
db = flask_sqlalchemy.SQLAlchemy()
-_credential_cache = {}
+
class IdnaDomain(db.TypeDecorator):
""" Stores a Unicode string in it's IDNA representation (ASCII only)
@@ -383,7 +383,7 @@ class User(Base, Email):
return User._ctx
def check_password(self, password):
- cache_result = _credential_cache.get(self.get_id())
+ cache_result = self._credential_cache.get(self.get_id())
current_salt = self.password.split('$')[3] if len(self.password.split('$')) == 5 else None
if cache_result and current_salt:
cache_salt, cache_hash = cache_result
@@ -392,7 +392,7 @@ class User(Base, Email):
else:
# the cache is local per gunicorn; the password has changed
# so the local cache can be invalidated
- del _credential_cache[self.get_id()]
+ del self._credential_cache[self.get_id()]
reference = self.password
# strip {scheme} if that's something mailu has added
@@ -418,7 +418,7 @@ we have little control over GC and string interning anyways.
An attacker that can dump the process' memory is likely to find credentials
in clear-text regardless of the presence of the cache.
"""
- _credential_cache[self.get_id()] = (self.password.split('$')[3], hash.pbkdf2_sha256.using(rounds=1).hash(password))
+ self._credential_cache[self.get_id()] = (self.password.split('$')[3], hash.pbkdf2_sha256.using(rounds=1).hash(password))
return result
def set_password(self, password, hash_scheme=None, raw=False):
From ffa75620799669f7fe23aeaa75e4b90d55c1fe95 Mon Sep 17 00:00:00 2001
From: Linus Gasser
Date: Mon, 7 Jun 2021 07:57:30 +0200
Subject: [PATCH 080/181] configurations changed place in 1.8
---
docs/faq.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/faq.rst b/docs/faq.rst
index 5b13f191..e42bf309 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -258,8 +258,8 @@ Postfix, Dovecot, Nginx and Rspamd support overriding configuration files. Overr
correct syntax. The following file names will be taken as override configuration:
- `Postfix`_ :
- - ``main.cf`` as ``$ROOT/overrides/postfix.cf``
- - ``master.cf`` as ``$ROOT/overrides/postfix.master``
+ - ``main.cf`` as ``$ROOT/overrides/postfix/postfix.cf``
+ - ``master.cf`` as ``$ROOT/overrides/postfix/postfix.master``
- All ``$ROOT/overrides/postfix/*.map`` files
- For both ``postfix.cf`` and ``postfix.master``, you need to put one configuration per line, as they are fed line-by-line
to postfix.
From 21a362fdaea022404e4f8e55fb1def73f1c24d35 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Tue, 8 Jun 2021 07:09:07 +0000
Subject: [PATCH 081/181] Changed config-update to config-import in
config-import description.
---
docs/cli.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/cli.rst b/docs/cli.rst
index 10669108..957e47e4 100644
--- a/docs/cli.rst
+++ b/docs/cli.rst
@@ -212,13 +212,13 @@ mail-config.yml contains the configuration and looks like this:
comment: test
smtp: mx.example.com
-config-update shows the number of created/modified/deleted objects after import.
+config-import shows the number of created/modified/deleted objects after import.
To suppress all messages except error messages use ``--quiet``.
By adding the ``--verbose`` switch the import gets more detailed and shows exactly what attributes changed.
In all log messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to log secrets.
If you want to test what would be done when importing without committing any changes, use ``--dry-run``.
-By default config-update replaces the whole configuration. ``--update`` allows to modify the existing configuration instead.
+By default config-import replaces the whole configuration. ``--update`` allows to modify the existing configuration instead.
New elements will be added and existing elements will be modified.
It is possible to delete a single element or prune all elements from lists and associative arrays using a special notation:
From 2316ef1162d400fc486ad72df62fbd1fbd46afce Mon Sep 17 00:00:00 2001
From: lub
Date: Wed, 16 Jun 2021 14:21:55 +0200
Subject: [PATCH 082/181] update compression algorithms for dovecot 2.3.14
xz is deprecated; lz4 and zstd were not present in our configs before
---
core/dovecot/conf/dovecot.conf | 2 +-
setup/flavors/compose/mailu.env | 2 +-
tests/compose/core/mailu.env | 2 +-
tests/compose/fetchmail/mailu.env | 2 +-
tests/compose/filters/mailu.env | 2 +-
tests/compose/rainloop/mailu.env | 2 +-
tests/compose/roundcube/mailu.env | 2 +-
tests/compose/webdav/mailu.env | 2 +-
8 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/core/dovecot/conf/dovecot.conf b/core/dovecot/conf/dovecot.conf
index ab5cb43a..50657088 100644
--- a/core/dovecot/conf/dovecot.conf
+++ b/core/dovecot/conf/dovecot.conf
@@ -50,7 +50,7 @@ plugin {
fts_autoindex_exclude = \Trash
{% endif %}
- {% if COMPRESSION in [ 'gz', 'bz2', 'xz', 'lz4' ] %}
+ {% if COMPRESSION in [ 'gz', 'bz2', 'lz4', 'zstd' ] %}
zlib_save = {{ COMPRESSION }}
{% endif %}
diff --git a/setup/flavors/compose/mailu.env b/setup/flavors/compose/mailu.env
index 04148b40..0aabf478 100644
--- a/setup/flavors/compose/mailu.env
+++ b/setup/flavors/compose/mailu.env
@@ -86,7 +86,7 @@ WELCOME_SUBJECT={{ welcome_subject or 'Welcome to your new email account' }}
WELCOME_BODY={{ welcome_body or 'Welcome to your new email account, if you can read this, then it is configured properly!' }}
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION={{ compression }}
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL={{ compression_level }}
diff --git a/tests/compose/core/mailu.env b/tests/compose/core/mailu.env
index edea6a5c..a78515b8 100644
--- a/tests/compose/core/mailu.env
+++ b/tests/compose/core/mailu.env
@@ -92,7 +92,7 @@ DMARC_RUF=admin
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
diff --git a/tests/compose/fetchmail/mailu.env b/tests/compose/fetchmail/mailu.env
index 4a53ec46..afb57751 100644
--- a/tests/compose/fetchmail/mailu.env
+++ b/tests/compose/fetchmail/mailu.env
@@ -92,7 +92,7 @@ DMARC_RUF=admin
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
diff --git a/tests/compose/filters/mailu.env b/tests/compose/filters/mailu.env
index 1106deb0..4c8c219d 100644
--- a/tests/compose/filters/mailu.env
+++ b/tests/compose/filters/mailu.env
@@ -92,7 +92,7 @@ DMARC_RUF=admin
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
diff --git a/tests/compose/rainloop/mailu.env b/tests/compose/rainloop/mailu.env
index d02b98f2..08b0f8a4 100644
--- a/tests/compose/rainloop/mailu.env
+++ b/tests/compose/rainloop/mailu.env
@@ -92,7 +92,7 @@ DMARC_RUF=admin
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
diff --git a/tests/compose/roundcube/mailu.env b/tests/compose/roundcube/mailu.env
index e1005487..faf1198f 100644
--- a/tests/compose/roundcube/mailu.env
+++ b/tests/compose/roundcube/mailu.env
@@ -92,7 +92,7 @@ DMARC_RUF=admin
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
diff --git a/tests/compose/webdav/mailu.env b/tests/compose/webdav/mailu.env
index 58b9810a..939f453b 100644
--- a/tests/compose/webdav/mailu.env
+++ b/tests/compose/webdav/mailu.env
@@ -92,7 +92,7 @@ DMARC_RUF=admin
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
From 40ad3ca0325b6920128624665bf0164670f6e31d Mon Sep 17 00:00:00 2001
From: lub
Date: Wed, 16 Jun 2021 14:56:53 +0200
Subject: [PATCH 083/181] only load zlib when compression is used
---
core/dovecot/conf/dovecot.conf | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/core/dovecot/conf/dovecot.conf b/core/dovecot/conf/dovecot.conf
index 50657088..6b97a086 100644
--- a/core/dovecot/conf/dovecot.conf
+++ b/core/dovecot/conf/dovecot.conf
@@ -21,7 +21,10 @@ mail_access_groups = mail
maildir_stat_dirs = yes
mailbox_list_index = yes
mail_vsize_bg_after_count = 100
-mail_plugins = $mail_plugins quota quota_clone zlib{{ ' ' }}
+mail_plugins = $mail_plugins quota quota_clone{{ ' ' }}
+ {%- if COMPRESSION -%}
+ zlib{{ ' ' }}
+ {%- endif %}
{%- if (FULL_TEXT_SEARCH or '').lower() not in ['off', 'false', '0'] -%}
fts fts_xapian
{%- endif %}
From 18f5a2fc11191168c436e862f8f060ba4aeed9c0 Mon Sep 17 00:00:00 2001
From: lub
Date: Wed, 16 Jun 2021 15:01:55 +0200
Subject: [PATCH 084/181] update newsfragment #1694
---
towncrier/newsfragments/1694.feature | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/towncrier/newsfragments/1694.feature b/towncrier/newsfragments/1694.feature
index 41548707..f7e2013e 100644
--- a/towncrier/newsfragments/1694.feature
+++ b/towncrier/newsfragments/1694.feature
@@ -1 +1 @@
-Support configuring xz and lz4 compression for dovecot.
+Support configuring lz4 and zstd compression for dovecot.
From 587901ca51462bf32a4d6df314667a0081a36378 Mon Sep 17 00:00:00 2001
From: lub
Date: Wed, 16 Jun 2021 15:03:09 +0200
Subject: [PATCH 085/181] fix comment in compose .env
---
docs/compose/.env | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/compose/.env b/docs/compose/.env
index b4a8b218..27822c37 100644
--- a/docs/compose/.env
+++ b/docs/compose/.env
@@ -97,7 +97,7 @@ WELCOME_SUBJECT=Welcome to your new email account
WELCOME_BODY=Welcome to your new email account, if you can read this, then it is configured properly!
# Maildir Compression
-# choose compression-method, default: none (value: gz, bz2, xz, lz4)
+# choose compression-method, default: none (value: gz, bz2, lz4, zstd)
COMPRESSION=
# change compression-level, default: 6 (value: 1-9)
COMPRESSION_LEVEL=
From a1fd44fced2c65a1c379b445fb65f2a5a24efd24 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 16 Jun 2021 16:19:31 +0200
Subject: [PATCH 086/181] added lmtp: prefix and documentation
---
core/admin/mailu/internal/views/postfix.py | 39 ++++--
docs/webadministration.rst | 139 +++++++++++----------
2 files changed, 101 insertions(+), 77 deletions(-)
diff --git a/core/admin/mailu/internal/views/postfix.py b/core/admin/mailu/internal/views/postfix.py
index 7f8418cf..c358c37f 100644
--- a/core/admin/mailu/internal/views/postfix.py
+++ b/core/admin/mailu/internal/views/postfix.py
@@ -36,15 +36,23 @@ def postfix_alias_map(alias):
def postfix_transport(email):
if email == '*' or re.match("(^|.*@)\[.*\]$", email):
return flask.abort(404)
- localpart, domain_name = models.Email.resolve_domain(email)
+ _, domain_name = models.Email.resolve_domain(email)
relay = models.Relay.query.get(domain_name) or flask.abort(404)
target = relay.smtp.lower()
port = None
- if use_mx := target.startswith('mx:'):
+ use_lmtp = False
+ use_mx = False
+ # strip prefixes mx: and lmtp:
+ if target.startswith('mx:'):
target = target[3:]
+ use_mx = True
+ elif target.startswith('lmtp:'):
+ target = target[5:]
+ use_lmtp = True
+ # split host:port or [host]:port
if target.startswith('['):
if use_mx or ']' not in target:
- # invalid target (mx: and [])
+ # invalid target (mx: and [] or missing ])
flask.abort(400)
host, rest = target[1:].split(']', 1)
if rest.startswith(':'):
@@ -57,29 +65,38 @@ def postfix_transport(email):
host, port = target.rsplit(':', 1)
else:
host = target
+ # default for empty host part is mx:domain
if not host:
- host = relay.name.lower()
- use_mx = True
+ if not use_lmtp:
+ host = relay.name.lower()
+ use_mx = True
+ else:
+ # lmtp: needs a host part
+ flask.abort(400)
+ # detect ipv6 address or encode host
if ':' in host:
host = f'ipv6:{host}'
else:
try:
host = idna.encode(host).decode('ascii')
except idna.IDNAError:
- # invalid target (fqdn not encodable)
+ # invalid host (fqdn not encodable)
flask.abort(400)
+ # validate port
if port is not None:
try:
port = int(port, 10)
- if port == 25:
- port = None
except ValueError:
- # invalid target (port should be numeric)
+ # invalid port (should be numeric)
flask.abort(400)
- if not use_mx:
+ # create transport
+ transport = 'lmtp' if use_lmtp else 'smtp'
+ # use [] when not using MX lookups or host is an ipv6 address
+ if host.startswith('ipv6:') or (not use_lmtp and not use_mx):
host = f'[{host}]'
+ # create port suffix
port = '' if port is None else f':{port}'
- return flask.jsonify(f'smtp:{host}{port}')
+ return flask.jsonify(f'{transport}:{host}{port}')
@internal.route("/postfix/recipient/map/")
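Note (not part of the patch): illustrative examples of the transport strings the view above produces for different "remote host" values on a relayed domain; example.com, example.net and imap are placeholder hostnames.

    # illustrative mapping from the 'remote host' field to the generated postfix transport
    examples = {
        '':                    'smtp:example.com',         # empty: MX of the relayed domain itself
        'mail.example.net':    'smtp:[mail.example.net]',  # A/AAAA of the target, no MX lookup
        'mx:example.net:2525': 'smtp:example.net:2525',    # MX lookup of the target, custom port
        'lmtp:imap:24':        'lmtp:imap:24',             # LMTP to a specific host and port
    }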
diff --git a/docs/webadministration.rst b/docs/webadministration.rst
index 070eb473..86ce41c0 100644
--- a/docs/webadministration.rst
+++ b/docs/webadministration.rst
@@ -1,7 +1,7 @@
Web administration interface
============================
-The web administration interface is the main website for maintaining your Mailu installation.
+The web administration interface is the main website for maintaining your Mailu installation.
For brevity the web administration interface will now be mentioned as admin gui.
It offers the following configuration options:
@@ -30,13 +30,13 @@ It offers the following configuration options:
* Configure all email domains served by Mailu, including:
* generating dkim and dmarc keys for a domain.
-
+
* view email domain information on how to configure your SPF, DMARC, DKIM and MX dns records for an email domain.
-
+
* Add new email domains.
-
+
* For existing domains, configure users, quotas, aliases, administrators and alternative domain names.
-
+
* access the webmail site.
* lookup settings for configuring your email client.
@@ -49,7 +49,7 @@ The admin GUI is by default accessed via the URL `https:///admin`, wh
To login the admin GUI enter the email address and password of an user.
Only global administrator users have access to all configuration settings and the Rspamd webgui. Other users will be presented with settings for only their account, and domains they are managers of.
-To create a user who is a global administrator for a new installation, the Mailu.env file can be adapted.
+To create a user who is a global administrator for a new installation, the Mailu.env file can be adapted.
For more information see the section 'Admin account - automatic creation' in :ref:`the configuration reference `.
The following sections are only accessible for global administrators:
@@ -69,7 +69,7 @@ The following sections are only accessible for global administrators:
Settings
--------
-After logging in the web administration interface, the settings page is loaded.
+After logging in the web administration interface, the settings page is loaded.
On the settings page the settings of the currently logged in user can be changed.
Changes are saved and effective immediately after clicking the Save Settings button at the bottom of the page.
@@ -77,27 +77,27 @@ Changes are saved and effective immediately after clicking the Save Settings but
Display name
````````````
-On the settings page the displayed name can be changed of the logged in user.
+On the settings page the displayed name can be changed of the logged in user.
This display name is only used within the web administration interface.
Antispam
````````
-Under the section `Antispam` the spam filter can be enabled or disabled for the logged in user. By default the spam filter is enabled.
+Under the section `Antispam` the spam filter can be enabled or disabled for the logged in user. By default the spam filter is enabled.
When the spam filter is disabled, all received email messages will go to the inbox folder of the logged in user.
The exception to this rule, are email messages with an extremely high spam score. These email messages are always rejected by Rspamd.
When the spam filter is enabled, received email messages will be moved to the logged in user's inbox folder or junk folder depending on the user defined spam filter tolerance.
-The user defined spam filter tolerance determines when an email is classified as ham (moved to the inbox folder) or spam (moved to the junk folder).
-The default value is 80%. The lower the spam filter tolerance, the more false positives (ham classified as spam). The higher the spam filter tolerance, the more false negatives (spam classified as ham).
+The user defined spam filter tolerance determines when an email is classified as ham (moved to the inbox folder) or spam (moved to the junk folder).
+The default value is 80%. The lower the spam filter tolerance, the more false positives (ham classified as spam). The higher the spam filter tolerance, the more false negatives (spam classified as ham).
For more information see the :ref:`antispam documentation `.
Auto-forward
`````````````
-Under the section `Auto-forward`, the automatic forwarding of received email messages can be enabled. When enabled, all received email messages are forwarded to the specified email address.
+Under the section `Auto-forward`, the automatic forwarding of received email messages can be enabled. When enabled, all received email messages are forwarded to the specified email address.
The option "Keep a copy of the emails" can be ticked, to keep a copy of the received email message in the inbox folder.
@@ -107,7 +107,7 @@ In the destination textbox, the email addresses can be entered for automatic for
Update password
---------------
-On the `update password` page, the password of the logged in user can be changed. Changes are effective immediately.
+On the `update password` page, the password of the logged in user can be changed. Changes are effective immediately.
.. _webadministration_auto-reply:
@@ -117,7 +117,7 @@ Auto-reply
On the `auto-reply` page, automatic replies can be configured. This is also known as out of office (ooo) or out of facility (oof) replies.
-To enable automatic replies tick the checkbox 'Enable automatic reply'.
+To enable automatic replies tick the checkbox 'Enable automatic reply'.
Under Reply subject the email subject for automatic replies can be configured. When a reply subject is entered, this subject will be used for the automatic reply.
@@ -130,12 +130,12 @@ E.g. if the email subject of the received email message is "how are you?", then
Fetched accounts
----------------
-This page is only available when the Fetchmail container is part of your Mailu deployment.
+This page is only available when the Fetchmail container is part of your Mailu deployment.
Fetchmail can be enabled when creating the docker-compose.yml file with the setup utility (https://setup.mailu.io).
On the `fetched accounts` page you can configure email accounts from which email messages will be retrieved.
-Only unread email messages are retrieved from the specified email account.
-By default Fetchmail will retrieve email messages every 10 minutes. This can be changed in the Mailu.env file.
+Only unread email messages are retrieved from the specified email account.
+By default Fetchmail will retrieve email messages every 10 minutes. This can be changed in the Mailu.env file.
For more information on changing the polling interval see :ref:`the configuration reference `.
@@ -149,7 +149,7 @@ You can add a fetched account by clicking on the `Add an account` button on the
* Enable TLS. Tick this setting if the email server requires TLS/SSL instead of STARTTLS.
-* Username. The user name for logging in to the email server. Normally this is the email address or the email address' local-part (the part before @).
+* Username. The user name for logging in to the email server. Normally this is the email address or the email address' local-part (the part before @).
* Password. The password for logging in to the email server.
@@ -166,8 +166,8 @@ The purpose of an authentication token is to create a unique and strong password
The application will use this authentication token instead of the logged in user's password for sending/receiving email.
This allows safe access to the logged in user's email account. At any moment, the authentication token can be deleted so that the application has no access to the logged in user's email account anymore.
-By clicking on the New token button on the top right of the page, a new authentication token can be created. On this page the generated authentication token will only be displayed once.
-After saving the application token it is not possible anymore to view the unique password.
+By clicking on the New token button on the top right of the page, a new authentication token can be created. On this page the generated authentication token will only be displayed once.
+After saving the application token it is not possible anymore to view the unique password.
The comment field can be used to enter a description for the authentication token. For example the name of the application the application token is created for.
@@ -198,9 +198,9 @@ A global administrator can change `any setting` in the admin GUI. Be careful tha
Relayed domains
---------------
-On the `relayed domains list` page, destination domains can be added that Mailu will relay email messages for without authentication.
-This means that for these destination domains, other email clients or email servers can send email via Mailu unauthenticated via port 25 to this destination domain.
-For example if the destination domain example.com is added. Any emails to example.com (john@example.com) will be relayed to example.com.
+On the `relayed domains list` page, destination domains can be added that Mailu will relay email messages for without authentication.
+This means that for these destination domains, other email clients or email servers can send email via Mailu unauthenticated via port 25 to this destination domain.
+For example if the destination domain example.com is added. Any emails to example.com (john@example.com) will be relayed to example.com.
Example scenario's are:
* relay domain from a backup server.
@@ -212,30 +212,37 @@ Example scenario's are:
On the new relayed domain page the following options can be entered for a new relayed domain:
-* Relayed domain name. The domain name that is relayed. Email messages addressed to this domain (To: John@example.com), will be forwarded to this domain.
- No authentication is required.
+* Relayed domain name. The domain name that is relayed. Email messages addressed to this domain (To: John@example.com), will be forwarded to this domain.
+ No authentication is required.
-* Remote host (optional). The SMPT server that will be used for relaying the email message.
- When this field is blank, the Mailu server will directly send the email message to the relayed domain.
- As value can be entered either a hostname or IP address of the SMPT server.
- By default port 25 is used. To use a different port append ":port number" to the Remote Host. For example:
- 123.45.67.90:2525.
+* Remote host (optional). The host that will be used for relaying the email message.
+ When this field is blank, the Mailu server will directly send the email message to the mail server of the relayed domain.
+  When a remote host is specified, it can be prefixed by ``mx:`` or ``lmtp:`` and followed by a port number (``:port``).
+
+ ================ ===================================== =========================
+ Remote host Description postfix transport:nexthop
+ ================ ===================================== =========================
+ empty use MX of relay domain smtp:domain
+ :port use MX of relay domain and use port smtp:domain:port
+ target resolve A/AAAA of target smtp:[target]
+ target:port resolve A/AAAA of target and use port smtp:[target]:port
+ mx:target resolve MX of target smtp:target
+ mx:target:port resolve MX of target and use port smtp:target:port
+ lmtp:target resolve A/AAAA of target lmtp:target
+ lmtp:target:port resolve A/AAAA of target and use port lmtp:target:port
+ ================ ===================================== =========================
+
+ `target` can also be an IPv4 or IPv6 address (an IPv6 address must be enclosed in []: ``[2001:DB8::]``).
* Comment. A text field where a comment can be entered to describe the entry.
Changes are effective immediately after clicking the Save button.
-NOTE: Due to bug `1588`_ email messages fail to be relayed if no Remote Host is configured.
-As a workaround the HOSTNAME or IP Address of the SMPT server of the relayed domain can be entered as Remote Host.
-Please note that no MX lookup is performed when entering a hostname as Remote Host. You can use the MX lookup on mxtoolbox.com to find the hostname and IP Address of the SMTP server.
-
-.. _`1588`: https://github.com/Mailu/Mailu/issues/1588
-
Antispam
--------
The menu item Antispam opens the Rspamd webgui. For more information how spam filtering works in Mailu see the :ref:`Spam filtering page `.
-The spam filtering page also contains a section that describes how to create a local blacklist for blocking email messages from specific domains.
+The spam filtering page also contains a section that describes how to create a local blacklist for blocking email messages from specific domains.
The Rspamd webgui offers basic functions for setting metric actions, scores, viewing statistics and learning.
The following settings are not persisent and are *lost* when the Antispam container is recreated or restarted:
@@ -266,31 +273,31 @@ On the `Mail domains` page all the domains served by Mailu are configured. Via t
Details
```````
-This page is also accessible for domain managers. On the details page all DNS settings are displayed for configuring your DNS server. It contains information on what to configure as MX record and SPF record. On this page it is also possible to (re-)generate the keys for DKIM and DMARC. The option for generating keys for DKIM and DMARC is only available for global administrators. After generating the keys for DKIM and DMARC, this page will also show the DNS records for configuring the DKIM/DMARC records on the DNS server.
+This page is also accessible for domain managers. On the details page all DNS settings are displayed for configuring your DNS server. It contains information on what to configure as MX record and SPF record. On this page it is also possible to (re-)generate the keys for DKIM and DMARC. The option for generating keys for DKIM and DMARC is only available for global administrators. After generating the keys for DKIM and DMARC, this page will also show the DNS records for configuring the DKIM/DMARC records on the DNS server.
Edit
-````
+````
-This page is only accessible for global administrators. On the edit page, the global settings for the domain can be changed.
+This page is only accessible for global administrators. On the edit page, the global settings for the domain can be changed.
* Maximum user count. The maximum amount of users that can be created under this domain. Once this limit is reached it is not possible anymore to add users to the domain; and it is also not possible for users to self-register.
-
+
* Maximum alias count. The maximum amount of aliases that can be created for an email account.
-
+
* Maximum user quota. The maximum amount of quota that can be assigned to a user. When creating or editing a user, this sets the limit on the maximum amount of quota that can be assigned to the user.
-
-* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available.
- Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account.
- If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider.
+
+* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available.
+ Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account.
+ If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider.
Use this option with care!
-
+
* Comment. Description for the domain. This description is visible on the parent domains list page.
Delete
``````
-This page is only accessible for global administrators. This page allows you to delete the domain. The Admin GUI will ask for confirmation if the domain must be really deleted.
+This page is only accessible for global administrators. This page allows you to delete the domain. The Admin GUI will ask for confirmation if the domain must be really deleted.
Users
@@ -326,7 +333,7 @@ For adding a new user the following options can be configured.
* Enabled. Tick this checkbox to enable the user account. When an user is disabled, the user is unable to login to the Admin GUI or webmail or access his email via IMAP/POP3 or send mail.
The email inbox of the user is still retained. This option can be used to temporarily suspend an user account.
-
+
* Quota. The maximum quota for the user's email box.
* Allow IMAP access. When ticked, allows email retrieval via the IMAP protocol.
@@ -337,7 +344,7 @@ For adding a new user the following options can be configured.
Aliases
```````
-This page is also accessible for domain managers. On the aliases page, aliases can be added for email addresses. An alias is a way to disguise another email address.
+This page is also accessible for domain managers. On the aliases page, aliases can be added for email addresses. An alias is a way to disguise another email address.
Everything sent to an alias email address is actually received in the primary email account's inbox of the destination email address.
Aliases can diversify a single email account without having to create multiple email addresses (users).
It is also possible to add multiple email addresses to the destination field. All incoming mails will be sent to each users inbox in this case.
@@ -348,11 +355,11 @@ The following options are available when adding an alias:
* Use SQL LIKE Syntax (e.g. for catch-all aliases). When this option is ticked, you can use SQL LIKE syntax as alias.
The SQL LIKE syntax is used to match text values against a pattern using wildcards. There are two wildcards that can be used with SQL LIKE syntax:
-
+
* % - The percent sign represents zero, one, or multiple characters
* _ - The underscore represents a single character
-
- Examples are:
+
+ Examples are:
* a% - Finds any values that start with "a"
* %a - Finds any values that end with "a"
* %or% - Finds any values that have "or" in any position
@@ -369,7 +376,7 @@ The following options are available when adding an alias:
Managers
````````
-This page is also accessible for domain managers. On the `managers list` page, managers can be added for the domain and can be deleted.
+This page is also accessible for domain managers. On the `managers list` page, managers can be added for the domain and can be deleted.
Managers have access to configuration settings of the domain.
On the `add manager` page you can click on the manager email text box to access a drop down list of users that can be made a manager of the domain.
@@ -377,11 +384,11 @@ On the `add manager` page you can click on the manager email text box to access
Alternatives
````````````
-This page is only accessible for global administrators. On the alternatives page, alternative domains can be added for the domain.
+This page is only accessible for global administrators. On the alternatives page, alternative domains can be added for the domain.
An alternative domain acts as a copy of a given domain.
-Everything sent to an alternative domain, is actually received in the domain the alternative is created for.
-This allows you to receive emails for multiple domains while using a single domain.
-For example if the main domain has the email address user@example.com, and the alternative domain is mymail.com,
+Everything sent to an alternative domain, is actually received in the domain the alternative is created for.
+This allows you to receive emails for multiple domains while using a single domain.
+For example if the main domain has the email address user@example.com, and the alternative domain is mymail.com,
then email send to user@mymail.com will end up in the email box of user@example.com.
New domain
@@ -392,16 +399,16 @@ This page is only accessible for global administrators. Via this page a new doma
* domain name. The name of the domain.
* Maximum user count. The maximum amount of users that can be created under this domain. Once this limit is reached it is not possible anymore to add users to the domain; and it is also not possible for users to self-register.
-
+
* Maximum alias count. The maximum amount of aliases that can be made for an email account.
-
+
* Maximum user quota. The maximum amount of quota that can be assigned to a user. When creating or editing a user, this sets the limit on the maximum amount of quota that can be assigned to the user.
-
-* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available.
- Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account.
- If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider.
+
+* Enable sign-up. When this option is ticked, self-registration is enabled. When the Admin GUI is accessed, in the menu list the option Signup becomes available.
+ Obviously this menu item is only visible when signed out. On the Signup page a user can create an email account.
+ If your Admin GUI is available to the public internet, this means your Mailu installation basically becomes a free email provider.
Use this option with care!
-
+
* Comment. Description for the domain. This description is visible on the parent domains list page.
@@ -414,7 +421,7 @@ The menu item `Webmail` opens the webmail page. This option is only available if
Client setup
------------
-The menu item `Client setup` shows all settings for configuring your email client for connecting to Mailu.
+The menu item `Client setup` shows all settings for configuring your email client for connecting to Mailu.
Website
From 9ef8aaf6989376480b0ecd6c4c6fc2f98a7af408 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 16 Jun 2021 22:06:28 +0200
Subject: [PATCH 087/181] removed double confiog and fixed shaker
---
core/admin/mailu/configuration.py | 1 -
core/admin/mailu/utils.py | 3 +--
2 files changed, 1 insertion(+), 3 deletions(-)
diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py
index 679c6c7e..3d1b4fb5 100644
--- a/core/admin/mailu/configuration.py
+++ b/core/admin/mailu/configuration.py
@@ -140,7 +140,6 @@ class ConfigManager(dict):
self.config['SESSION_STORAGE_URL'] = 'redis://{0}/3'.format(self.config['REDIS_ADDRESS'])
self.config['SESSION_COOKIE_SAMESITE'] = 'Strict'
self.config['SESSION_COOKIE_HTTPONLY'] = True
- self.config['SESSION_KEY_BITS'] = 128
self.config['PERMANENT_SESSION_LIFETIME'] = timedelta(hours=int(self.config['SESSION_LIFETIME']))
# update the app config itself
app.config = self
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index 30725ff7..214a9a2d 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -260,9 +260,8 @@ class MailuSessionConfig:
hash_bytes = bits//8 + (bits%8>0)
time_bytes = 4 # 32 bit timestamp for now
- shaker = hashlib.shake_256 if bits>128 else hashlib.shake_128
- self._shaker = shaker(want_bytes(app.config.get('SECRET_KEY', '')))
+ self._shaker = hashlib.shake_128(want_bytes(app.config.get('SECRET_KEY', '')))
self._hash_len = hash_bytes
self._hash_b64 = len(self._encode(bytes(hash_bytes)))
self._key_min = 2*self._hash_b64
From 3f23e199f6b64bb4960d8368a4c670f16c5f705b Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Thu, 17 Jun 2021 17:53:15 +0200
Subject: [PATCH 088/181] modified generation of session key and added refresh
- the session key is now generated using
- a hash of the uid seeded by the app's secret_key (size: SESSION_KEY_BITS)
- a random token (size: 128 bits)
- the session's creation time (size: 32 bits)
- redis server side sessions are now refreshed after 1/2 the session lifetime
even if not modified
- the cookie is also updated if necessary
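A rough sketch of how a key with this layout could be produced and checked (illustrative only, not the exact Mailu implementation; it assumes URL-safe base64 encoding and the sizes listed above):

import base64, hmac, secrets, time

def b64(data: bytes) -> bytes:
    # URL-safe base64 without padding, so the key stays cookie-safe
    return base64.urlsafe_b64encode(data).rstrip(b'=')

def make_session_key(secret_key: bytes, user_id: str, uid_bits: int = 64) -> bytes:
    # uid part: keyed hash of the user id, truncated to SESSION_KEY_BITS
    mac = hmac.new(hmac.digest(secret_key, secret_key, 'sha256'), digestmod='sha256')
    mac.update(user_id.encode())
    uid = mac.digest()[:uid_bits // 8]
    sid = secrets.token_bytes(16)                    # 128 bit random token
    created = int(time.time()).to_bytes(4, 'big')    # 32 bit creation time
    return b64(uid) + b64(sid) + b64(created)

def needs_refresh(refresh_at: int) -> bool:
    # server side sessions are refreshed once half of the lifetime has passed
    return int(time.time()) > refresh_at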
---
core/admin/mailu/utils.py | 96 +++++++++++++++++++++++++--------------
1 file changed, 61 insertions(+), 35 deletions(-)
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index 214a9a2d..c7e1f73c 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -6,7 +6,7 @@ try:
except ImportError:
import pickle
-import hashlib
+import hmac
import secrets
import time
@@ -218,13 +218,16 @@ class MailuSession(CallbackDict, SessionMixin):
def save(self):
""" Save session to store. """
+ set_cookie = False
+
# set uid from dict data
if self._uid is None:
self._uid = self.app.session_config.gen_uid(self.get('user_id', ''))
- # create new session id for new or regenerated sessions
+ # create new session id for new or regenerated sessions and force setting the cookie
if self._sid is None:
self._sid = self.app.session_config.gen_sid()
+ set_cookie = True
# get new session key
key = self.sid
@@ -233,6 +236,9 @@ class MailuSession(CallbackDict, SessionMixin):
if key != self._key:
self.delete()
+ # remember time to refresh
+ self['_refresh'] = int(time.time()) + self.app.permanent_session_lifetime.total_seconds()/2
+
# save session
self.app.session_store.put(
key,
@@ -245,37 +251,52 @@ class MailuSession(CallbackDict, SessionMixin):
self.new = False
self.modified = False
+ return set_cookie
+
+ def needs_refresh(self):
+ """ Checks if server side session needs to be refreshed. """
+
+ return int(time.time()) > self.get('_refresh', 0)
+
class MailuSessionConfig:
""" Stores sessions crypto config """
+ # default size of session key parts
+ uid_bits = 64 # default if SESSION_KEY_BITS is not set in config
+ sid_bits = 128 # for now. must be multiple of 8!
+ time_bits = 32 # for now. must be multiple of 8!
+
def __init__(self, app=None):
if app is None:
app = flask.current_app
- bits = app.config.get('SESSION_KEY_BITS', 64)
+ bits = app.config.get('SESSION_KEY_BITS', self.uid_bits)
+ if not 64 <= bits <= 256:
+ raise ValueError('SESSION_KEY_BITS must be between 64 and 256!')
- if bits < 64:
- raise ValueError('Session id entropy must not be less than 64 bits!')
+ uid_bytes = bits//8 + (bits%8>0)
+ sid_bytes = self.sid_bits//8
- hash_bytes = bits//8 + (bits%8>0)
- time_bytes = 4 # 32 bit timestamp for now
+ key = want_bytes(app.secret_key)
- self._shaker = hashlib.shake_128(want_bytes(app.config.get('SECRET_KEY', '')))
- self._hash_len = hash_bytes
- self._hash_b64 = len(self._encode(bytes(hash_bytes)))
- self._key_min = 2*self._hash_b64
- self._key_max = self._key_min + len(self._encode(bytes(time_bytes)))
+ self._hmac = hmac.new(hmac.digest(key, key, digest='sha256'), digestmod='sha256')
+ self._uid_len = uid_bytes
+ self._uid_b64 = len(self._encode(bytes(uid_bytes)))
+ self._sid_len = sid_bytes
+ self._sid_b64 = len(self._encode(bytes(sid_bytes)))
+ self._key_min = self._uid_b64 + self._sid_b64
+ self._key_max = self._key_min + len(self._encode(bytes(self.time_bits//8)))
def gen_sid(self):
""" Generate random session id. """
- return self._encode(secrets.token_bytes(self._hash_len))
+ return self._encode(secrets.token_bytes(self._sid_len))
def gen_uid(self, uid):
""" Generate hashed user id part of session key. """
- shaker = self._shaker.copy()
- shaker.update(want_bytes(uid))
- return self._encode(shaker.digest(self._hash_len))
+ _hmac = self._hmac.copy()
+ _hmac.update(want_bytes(uid))
+ return self._encode(_hmac.digest()[:self._uid_len])
def gen_created(self, now=None):
""" Generate base64 representation of creation time. """
@@ -287,8 +308,8 @@ class MailuSessionConfig:
if not (isinstance(key, bytes) and self._key_min <= len(key) <= self._key_max):
return None
- uid = key[:self._hash_b64]
- sid = key[self._hash_b64:self._key_min]
+ uid = key[:self._uid_b64]
+ sid = key[self._uid_b64:self._key_min]
crt = key[self._key_min:]
# validate if parts are decodeable
@@ -301,7 +322,7 @@ class MailuSessionConfig:
if now is None:
now = int(time.time())
created = int.from_bytes(created, byteorder='big')
- if not (created < now < created + app.permanent_session_lifetime.total_seconds()):
+ if not created < now < created + app.permanent_session_lifetime.total_seconds():
return None
return (uid, sid, crt)
@@ -341,24 +362,29 @@ class MailuSessionInterface(SessionInterface):
if session.accessed:
response.vary.add('Cookie')
- # TODO: set cookie from time to time to prevent expiration in browser
- # also update expire in redis
+ set_cookie = session.permanent and app.config['SESSION_REFRESH_EACH_REQUEST']
+ need_refresh = session.needs_refresh()
- if not self.should_set_cookie(app, session):
- return
+ # save modified session or refresh unmodified session
+ if session.modified or need_refresh:
+ set_cookie |= session.save()
- # save session and update cookie
- session.save()
- response.set_cookie(
- app.session_cookie_name,
- session.sid,
- expires=self.get_expiration_time(app, session),
- httponly=self.get_cookie_httponly(app),
- domain=self.get_cookie_domain(app),
- path=self.get_cookie_path(app),
- secure=self.get_cookie_secure(app),
- samesite=self.get_cookie_samesite(app)
- )
+ # set cookie on refreshed permanent sessions
+ if need_refresh and session.permanent:
+ set_cookie = True
+
+ # set or update cookie if necessary
+ if set_cookie:
+ response.set_cookie(
+ app.session_cookie_name,
+ session.sid,
+ expires=self.get_expiration_time(app, session),
+ httponly=self.get_cookie_httponly(app),
+ domain=self.get_cookie_domain(app),
+ path=self.get_cookie_path(app),
+ secure=self.get_cookie_secure(app),
+ samesite=self.get_cookie_samesite(app)
+ )
class MailuSessionExtension:
""" Server side session handling """
From 49c5c0eba691791c1cc7e4e1b0b4746b38f57074 Mon Sep 17 00:00:00 2001
From: parisni
Date: Fri, 18 Jun 2021 23:17:35 +0200
Subject: [PATCH 089/181] Split mailu / roundcube db config
There is no reason to share the flavor since at the very least the dbname should be different.
---
webmails/roundcube/start.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py
index 649f3324..36502eb6 100755
--- a/webmails/roundcube/start.py
+++ b/webmails/roundcube/start.py
@@ -10,7 +10,7 @@ log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT"))*0.66/1048576))
-db_flavor=os.environ.get("ROUNDCUBE_DB_FLAVOR",os.environ.get("DB_FLAVOR","sqlite"))
+db_flavor=os.environ.get("ROUNDCUBE_DB_FLAVOR","sqlite")
if db_flavor=="sqlite":
os.environ["DB_DSNW"]="sqlite:////data/roundcube.db"
elif db_flavor=="mysql":
From 5386e33af303af9e46f18fd7e0c6379f37bd3858 Mon Sep 17 00:00:00 2001
From: parisni
Date: Fri, 18 Jun 2021 23:21:24 +0200
Subject: [PATCH 090/181] Reformat python
---
webmails/roundcube/start.py | 43 ++++++++++++++++++-------------------
1 file changed, 21 insertions(+), 22 deletions(-)
diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py
index 36502eb6..3e47ce69 100755
--- a/webmails/roundcube/start.py
+++ b/webmails/roundcube/start.py
@@ -8,31 +8,29 @@ import subprocess
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
-os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT"))*0.66/1048576))
+os.environ["MAX_FILESIZE"] = str(int(int(os.environ.get("MESSAGE_SIZE_LIMIT")) * 0.66 / 1048576))
-db_flavor=os.environ.get("ROUNDCUBE_DB_FLAVOR","sqlite")
-if db_flavor=="sqlite":
- os.environ["DB_DSNW"]="sqlite:////data/roundcube.db"
-elif db_flavor=="mysql":
- os.environ["DB_DSNW"]="mysql://%s:%s@%s/%s" % (
- os.environ.get("ROUNDCUBE_DB_USER","roundcube"),
+db_flavor = os.environ.get("ROUNDCUBE_DB_FLAVOR", "sqlite")
+if db_flavor == "sqlite":
+ os.environ["DB_DSNW"] = "sqlite:////data/roundcube.db"
+elif db_flavor == "mysql":
+ os.environ["DB_DSNW"] = "mysql://%s:%s@%s/%s" % (
+ os.environ.get("ROUNDCUBE_DB_USER", "roundcube"),
os.environ.get("ROUNDCUBE_DB_PW"),
- os.environ.get("ROUNDCUBE_DB_HOST",os.environ.get("DB_HOST","database")),
- os.environ.get("ROUNDCUBE_DB_NAME","roundcube")
- )
-elif db_flavor=="postgresql":
- os.environ["DB_DSNW"]="pgsql://%s:%s@%s/%s" % (
- os.environ.get("ROUNDCUBE_DB_USER","roundcube"),
+ os.environ.get("ROUNDCUBE_DB_HOST", os.environ.get("DB_HOST", "database")),
+ os.environ.get("ROUNDCUBE_DB_NAME", "roundcube")
+ )
+elif db_flavor == "postgresql":
+ os.environ["DB_DSNW"] = "pgsql://%s:%s@%s/%s" % (
+ os.environ.get("ROUNDCUBE_DB_USER", "roundcube"),
os.environ.get("ROUNDCUBE_DB_PW"),
- os.environ.get("ROUNDCUBE_DB_HOST",os.environ.get("DB_HOST","database")),
- os.environ.get("ROUNDCUBE_DB_NAME","roundcube")
- )
+ os.environ.get("ROUNDCUBE_DB_HOST", os.environ.get("DB_HOST", "database")),
+ os.environ.get("ROUNDCUBE_DB_NAME", "roundcube")
+ )
else:
- print("Unknown ROUNDCUBE_DB_FLAVOR: %s",db_flavor)
+ print("Unknown ROUNDCUBE_DB_FLAVOR: %s", db_flavor)
exit(1)
-
-
conf.jinja("/php.ini", os.environ, "/usr/local/etc/php/conf.d/roundcube.ini")
# Create dirs, setup permissions
@@ -42,7 +40,8 @@ os.system("chown -R www-data:www-data /var/www/html/logs")
try:
print("Initializing database")
- result=subprocess.check_output(["/var/www/html/bin/initdb.sh","--dir","/var/www/html/SQL"],stderr=subprocess.STDOUT)
+ result = subprocess.check_output(["/var/www/html/bin/initdb.sh", "--dir", "/var/www/html/SQL"],
+ stderr=subprocess.STDOUT)
print(result.decode())
except subprocess.CalledProcessError as e:
if "already exists" in e.stdout.decode():
@@ -53,7 +52,7 @@ except subprocess.CalledProcessError as e:
try:
print("Upgrading database")
- subprocess.check_call(["/var/www/html/bin/update.sh","--version=?","-y"],stderr=subprocess.STDOUT)
+ subprocess.check_call(["/var/www/html/bin/update.sh", "--version=?", "-y"], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
quit(1)
@@ -61,7 +60,7 @@ except subprocess.CalledProcessError as e:
os.system("chown -R www-data:www-data /data")
# Tail roundcube logs
-subprocess.Popen(["tail","-f","-n","0","/var/www/html/logs/errors.log"])
+subprocess.Popen(["tail", "-f", "-n", "0", "/var/www/html/logs/errors.log"])
# Run apache
os.execv("/usr/local/bin/apache2-foreground", ["apache2-foreground"])
From 278878d48db01ee581014dd8cc0d102f43fb12c4 Mon Sep 17 00:00:00 2001
From: parisni
Date: Fri, 18 Jun 2021 23:36:14 +0200
Subject: [PATCH 091/181] Remove unused deps
---
optional/postgresql/start.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/optional/postgresql/start.py b/optional/postgresql/start.py
index 1f2f2a2b..d318d4d9 100755
--- a/optional/postgresql/start.py
+++ b/optional/postgresql/start.py
@@ -2,7 +2,6 @@
import anosql
import psycopg2
-import jinja2
import glob
import os
import subprocess
From d2803f6f4613df2948c19931793eede3a9c72362 Mon Sep 17 00:00:00 2001
From: parisni
Date: Sat, 19 Jun 2021 00:38:53 +0200
Subject: [PATCH 092/181] Update setup website
---
optional/postgresql/start.py | 1 -
setup/flavors/compose/mailu.env | 6 ++++++
setup/static/render.js | 21 +++++++++++++++++++++
setup/templates/steps/database.html | 14 +++++++++++++-
4 files changed, 40 insertions(+), 2 deletions(-)
diff --git a/optional/postgresql/start.py b/optional/postgresql/start.py
index d318d4d9..e34e157e 100755
--- a/optional/postgresql/start.py
+++ b/optional/postgresql/start.py
@@ -37,7 +37,6 @@ if not os.listdir("/data"):
rec.write("restore_command = 'gunzip < /backup/wal_archive/%f > %p'\n")
rec.write("standby_mode = off\n")
os.system("chown postgres:postgres /data/recovery.conf")
- #os.system("sudo -u postgres pg_ctl start -D /data -o '-h \"''\" '")
else:
# Bootstrap the database
os.system("sudo -u postgres initdb -D /data")
diff --git a/setup/flavors/compose/mailu.env b/setup/flavors/compose/mailu.env
index 44452e36..150c70a3 100644
--- a/setup/flavors/compose/mailu.env
+++ b/setup/flavors/compose/mailu.env
@@ -175,3 +175,9 @@ DB_HOST={{ db_url }}
DB_NAME={{ db_name }}
{% endif %}
+{% if (postgresql == 'external' or db_flavor == 'mysql') and webmail_type == 'roundcube' %}
+ROUNDCUBE_DB_USER={{ roundcube_db_user }}
+ROUNDCUBE_DB_PW={{ roundcube_db_pw }}
+ROUNDCUBE_DB_HOST={{ roundcube_db_url }}
+ROUNDCUBE_DB_NAME={{ roundcube_db_name }}
+{% endif %}
diff --git a/setup/static/render.js b/setup/static/render.js
index a817c4f0..0a0a6675 100644
--- a/setup/static/render.js
+++ b/setup/static/render.js
@@ -57,6 +57,13 @@ $(document).ready(function() {
$("#db_pw").prop('required',true);
$("#db_url").prop('required',true);
$("#db_name").prop('required',true);
+ if ($("#webmail").val() == 'roundcube') {
+ $("#roundcube_external_db").show();
+ $("#roundcube_db_user").prop('required',true);
+ $("#roundcube_db_pw").prop('required',true);
+ $("#roundcube_db_url").prop('required',true);
+ $("#roundcube_db_name").prop('required',true);
+ }
} else if (this.value == 'mysql') {
$("#postgres_db").hide();
$("#external_db").show();
@@ -64,6 +71,13 @@ $(document).ready(function() {
$("#db_pw").prop('required',true);
$("#db_url").prop('required',true);
$("#db_name").prop('required',true);
+ if ($("#webmail").val() == 'roundcube') {
+ $("#roundcube_external_db").show();
+ $("#roundcube_db_user").prop('required',true);
+ $("#roundcube_db_pw").prop('required',true);
+ $("#roundcube_db_url").prop('required',true);
+ $("#roundcube_db_name").prop('required',true);
+ }
}
});
$("#external_psql").change(function() {
@@ -73,6 +87,13 @@ $(document).ready(function() {
$("#db_pw").prop('required',true);
$("#db_url").prop('required',true);
$("#db_name").prop('required',true);
+ if ($("#webmail").val() == 'roundcube') {
+ $("#roundcube_external_db").show();
+ $("#roundcube_db_user").prop('required',true);
+ $("#roundcube_db_pw").prop('required',true);
+ $("#roundcube_db_url").prop('required',true);
+ $("#roundcube_db_name").prop('required',true);
+ }
} else {
$("#external_db").hide();
}
diff --git a/setup/templates/steps/database.html b/setup/templates/steps/database.html
index ad5411ab..d7184110 100644
--- a/setup/templates/steps/database.html
+++ b/setup/templates/steps/database.html
@@ -28,7 +28,7 @@
-
Set external database parameters
+
Set external database parameters for ADMIN UI
@@ -37,6 +37,18 @@
+
+
+
Set external database parameters for Roundcube
+
+
+
+
+
+
+
+
+
From 84e59c0a6e4d84b1a91c8e84293a7abd12259f6d Mon Sep 17 00:00:00 2001
From: parisni
Date: Sat, 19 Jun 2021 01:22:23 +0200
Subject: [PATCH 093/181] Add missing roundcube_db_flavor
---
setup/flavors/compose/mailu.env | 1 +
1 file changed, 1 insertion(+)
diff --git a/setup/flavors/compose/mailu.env b/setup/flavors/compose/mailu.env
index 150c70a3..4dce34f1 100644
--- a/setup/flavors/compose/mailu.env
+++ b/setup/flavors/compose/mailu.env
@@ -176,6 +176,7 @@ DB_NAME={{ db_name }}
{% endif %}
{% if (postgresql == 'external' or db_flavor == 'mysql') and webmail_type == 'roundcube' %}
+ROUNDCUBE_DB_FLAVOR={{ db_flavor }}
ROUNDCUBE_DB_USER={{ roundcube_db_user }}
ROUNDCUBE_DB_PW={{ roundcube_db_pw }}
ROUNDCUBE_DB_HOST={{ roundcube_db_url }}
From 14307c83c13877234d3daec11999f26d5bb0efff Mon Sep 17 00:00:00 2001
From: parisni
Date: Sat, 19 Jun 2021 09:12:46 +0200
Subject: [PATCH 094/181] Document databases variable and deprecation
---
docs/configuration.rst | 20 ++++++++++++++++++++
docs/database.rst | 3 ++-
2 files changed, 22 insertions(+), 1 deletion(-)
diff --git a/docs/configuration.rst b/docs/configuration.rst
index e08675a8..16ea23c3 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -195,4 +195,24 @@ resolved. This can be used to rely on DNS based service discovery with changing
When using ``*_ADDRESS``, the hostnames must be full-qualified hostnames. Otherwise nginx will not be able to
resolve the hostnames.
+Database settings
+-----------------
+
+The admin service stores configurations in a database.
+
+- ``DB_FLAVOR``: the database type for mailu admin service. (``sqlite``, ``postgresql``, ``mysql``)
+- ``DB_HOST``: the database host for mailu admin service. (when not ``sqlite``)
+- ``DB_PORT``: the database port for mailu admin service. (when not ``sqlite``)
+- ``DB_PW``: the database password for mailu admin service. (when not ``sqlite``)
+- ``DB_USER``: the database user for mailu admin service. (when not ``sqlite``)
+- ``DB_NAME``: the database name for mailu admin service. (when not ``sqlite``)
+
+The roundcube service stores configurations in a database.
+
+- ``ROUNDCUBE_DB_FLAVOR``: the database type for roundcube service. (``sqlite``, ``postgresql``, ``mysql``)
+- ``ROUNDCUBE_DB_HOST``: the database host for roundcube service. (when not ``sqlite``)
+- ``ROUNDCUBE_DB_PORT``: the database port for roundcube service. (when not ``sqlite``)
+- ``ROUNDCUBE_DB_PW``: the database password for roundcube service. (when not ``sqlite``)
+- ``ROUNDCUBE_DB_USER``: the database user for roundcube service. (when not ``sqlite``)
+- ``ROUNDCUBE_DB_NAME``: the database name for roundcube service. (when not ``sqlite``)
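As an illustration only (the host names, credentials and database names below are placeholders, not defaults), a ``mailu.env`` fragment using both groups of variables could look like:

  DB_FLAVOR=postgresql
  DB_HOST=database
  DB_PORT=5432
  DB_USER=mailu
  DB_PW=secret
  DB_NAME=mailu
  ROUNDCUBE_DB_FLAVOR=postgresql
  ROUNDCUBE_DB_HOST=database
  ROUNDCUBE_DB_PORT=5432
  ROUNDCUBE_DB_USER=roundcube
  ROUNDCUBE_DB_PW=secret
  ROUNDCUBE_DB_NAME=roundcube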
diff --git a/docs/database.rst b/docs/database.rst
index b2526d6f..c13ca0bf 100644
--- a/docs/database.rst
+++ b/docs/database.rst
@@ -8,7 +8,8 @@ This functionality should still be considered experimental!
Mailu Postgresql
----------------
-Mailu optionally comes with a pre-configured Postgresql image.
+Mailu optionally comes with a pre-configured Postgresql image, which as of 1.8 is deprecated and
+will be removed in 1.9.
This images has the following features:
- Automatic creation of users, db, extensions and password;
From a9548e4cbd31a9cd1b04414fc3ad4ca668f05bf6 Mon Sep 17 00:00:00 2001
From: parisni
Date: Sat, 19 Jun 2021 09:20:23 +0200
Subject: [PATCH 095/181] Remove mailu/roundcube shared host
---
webmails/roundcube/start.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/webmails/roundcube/start.py b/webmails/roundcube/start.py
index 3e47ce69..f87e460f 100755
--- a/webmails/roundcube/start.py
+++ b/webmails/roundcube/start.py
@@ -17,14 +17,14 @@ elif db_flavor == "mysql":
os.environ["DB_DSNW"] = "mysql://%s:%s@%s/%s" % (
os.environ.get("ROUNDCUBE_DB_USER", "roundcube"),
os.environ.get("ROUNDCUBE_DB_PW"),
- os.environ.get("ROUNDCUBE_DB_HOST", os.environ.get("DB_HOST", "database")),
+ os.environ.get("ROUNDCUBE_DB_HOST", "database"),
os.environ.get("ROUNDCUBE_DB_NAME", "roundcube")
)
elif db_flavor == "postgresql":
os.environ["DB_DSNW"] = "pgsql://%s:%s@%s/%s" % (
os.environ.get("ROUNDCUBE_DB_USER", "roundcube"),
os.environ.get("ROUNDCUBE_DB_PW"),
- os.environ.get("ROUNDCUBE_DB_HOST", os.environ.get("DB_HOST", "database")),
+ os.environ.get("ROUNDCUBE_DB_HOST", "database"),
os.environ.get("ROUNDCUBE_DB_NAME", "roundcube")
)
else:
From f4c76d49c1862894eba341eca318629a2137f274 Mon Sep 17 00:00:00 2001
From: parisni
Date: Sat, 19 Jun 2021 09:30:32 +0200
Subject: [PATCH 096/181] Add changelog entry
---
towncrier/newsfragments/1831.bugfix | 1 +
1 file changed, 1 insertion(+)
create mode 100644 towncrier/newsfragments/1831.bugfix
diff --git a/towncrier/newsfragments/1831.bugfix b/towncrier/newsfragments/1831.bugfix
new file mode 100644
index 00000000..1094be34
--- /dev/null
+++ b/towncrier/newsfragments/1831.bugfix
@@ -0,0 +1 @@
+Fix roundcube environment configuration for databases
\ No newline at end of file
From 58235bcc44e9a7449229ea0d851e35846a5ce176 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sat, 26 Jun 2021 08:25:15 +0000
Subject: [PATCH 097/181] Switch to github actions for CI/CD
---
.github/worfklows/CI.yml | 276 +++++++++++++++++++++++
.travis.yml | 56 -----
bors.toml | 3 +-
tests/compose/filters/00_create_users.sh | 5 +
4 files changed, 283 insertions(+), 57 deletions(-)
create mode 100644 .github/worfklows/CI.yml
delete mode 100644 .travis.yml
create mode 100755 tests/compose/filters/00_create_users.sh
diff --git a/.github/worfklows/CI.yml b/.github/worfklows/CI.yml
new file mode 100644
index 00000000..3dcaf096
--- /dev/null
+++ b/.github/worfklows/CI.yml
@@ -0,0 +1,276 @@
+
+
+name: CI
+on:
+ push:
+ branches:
+ - staging
+ - testing
+ - '1.5'
+ - '1.6'
+ - '1.7'
+ - '1.8'
+ - master
+ # version tags, e.g. 1.7.1
+ - '[1-9].[0-9].[0-9]'
+ # pre-releases, e.g. 1.8-pre1
+ - 1.8-pre[0-9]
+ # test branches, e.g. test-debian
+ - test-*
+
+###############################################
+# REQUIRED secrets
+# DOCKER_UN: ${{ secrets.Docker_Login }}
+# Username of docker login for pushing the images to repo $DOCKER_ORG
+# DOCKER_PW: ${{ secrets.Docker_Password }}
+# Password of docker login for pushing the images to repo $DOCKER_ORG
+# DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+# The docker repository where the images are pushed to.
+#
+# Add the above secrets to your github repo to determine where the images will be pushed.
+################################################
+
+jobs:
+ build:
+ name: build
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: check docker-compose version
+ run: docker-compose -v
+ - name: login docker
+ env:
+ DOCKER_UN: ${{ secrets.Docker_Login }}
+ DOCKER_PW: ${{ secrets.Docker_Password }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
+ # In this step, this action saves a list of existing images,
+ # the cache is created without them in the post run.
+ # It also restores the cache if it exists.
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: build all docker images
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ run: docker-compose -f tests/build.yml build
+
+#NOTE: It appears the filter test depends on the core test. The filter test requires an email user
+#that is created by the core test.
+ core-test:
+ name: core test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test core suite
+ run: python tests/compose/test.py core 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ filter-test:
+ name: filter test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: 'test clamvav'
+ run: python tests/compose/test.py filters 2
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ fetch-test:
+ name: fetch test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test fetch
+ run: python tests/compose/test.py fetchmail 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ rainloop-test:
+ name: rainloop test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test rainloop
+ run: python tests/compose/test.py rainloop 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ roundcube-test:
+ name: roundcube test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test roundcube
+ run: python tests/compose/test.py roundcube 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ webdav-test:
+ name: webdav test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test webdav
+ run: python tests/compose/test.py webdav 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ deploy:
+ name: deploy step
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ - core-test
+ - filter-test
+ - fetch-test
+ - rainloop-test
+ - roundcube-test
+ - webdav-test
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: login docker
+ env:
+ DOCKER_UN: ${{ secrets.Docker_Login }}
+ DOCKER_PW: ${{ secrets.Docker_Password }}
+ run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
+ - name: build all docker images
+ run: docker-compose -f tests/build.yml build
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ - name: deploy built docker images
+ env:
+ DOCKER_UN: ${{ secrets.Docker_Login }}
+ DOCKER_PW: ${{ secrets.Docker_Password }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ TRAVIS_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: bash tests/deploy.sh
+
+ # This job is watched by bors. It only completes if building, testing and deploy worked.
+ ci-success:
+ name: CI-Done
+ #Returns true when none of the **previous** steps have failed or been canceled.
+ if: ${{ success() }}
+ needs:
+ - deploy
+ runs-on: ubuntu-latest
+ steps:
+ - name: CI/CD succeeded.
+ run: exit 0
+
+
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index f2a85630..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-branches:
- only:
- - staging
- - testing
- - '1.5'
- - '1.6'
- - '1.7'
- - '1.8'
- - master
- # version tags, e.g. 1.7.1
- - /^1\.[5678]\.\d+$/
- # pre-releases, e.g. 1.8-pre1
- - /^1\.8-pre\d+$/
- # test branches, e.g. test-debian
- - /^test-[\w\-\.]+$/
-
-sudo: required
-services: docker
-addons:
- apt:
- packages:
- - docker-ce
-
-env:
- - MAILU_VERSION=${TRAVIS_BRANCH////-}
-
-language: python
-python:
- - "3.6"
-install:
- - pip install -r tests/requirements.txt
- - sudo curl -L https://github.com/docker/compose/releases/download/1.23.0-rc3/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
- - sudo chmod +x /usr/local/bin/docker-compose
-
-before_script:
- - docker-compose -v
- - echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- - docker-compose -f tests/build.yml build
- - sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
-
-
-script:
-# test.py, test name and timeout between start and tests.
- - python tests/compose/test.py core 1
- - python tests/compose/test.py fetchmail 1
- - travis_wait python tests/compose/test.py filters 10
- - python tests/compose/test.py rainloop 1
- - python tests/compose/test.py roundcube 1
- - python tests/compose/test.py webdav 1
-
-deploy:
- provider: script
- script: bash tests/deploy.sh
- on:
- all_branches: true
- condition: -n $DOCKER_UN
diff --git a/bors.toml b/bors.toml
index 5279fe72..272a6047 100644
--- a/bors.toml
+++ b/bors.toml
@@ -1,3 +1,4 @@
status = [
- "continuous-integration/travis-ci/push"
+ "CI-Done"
]
+
diff --git a/tests/compose/filters/00_create_users.sh b/tests/compose/filters/00_create_users.sh
new file mode 100755
index 00000000..3c581685
--- /dev/null
+++ b/tests/compose/filters/00_create_users.sh
@@ -0,0 +1,5 @@
+echo "Creating user required for next test ..."
+# Should not fail and update the password; update mode
+docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu admin admin mailu.io 'password' --mode=update || exit 1
+docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu user user mailu.io 'password' || exit 1
+echo "User created successfully"
From 54dd4cf224b510f657f0b07696cebf9e8d89d7c2 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sat, 26 Jun 2021 19:16:56 +0000
Subject: [PATCH 098/181] Added new docker repo for test image. Adapted deploy
script to use env var for test repo name. Modified travis references to
github actions references in docs. Added changelog entry.
---
.github/worfklows/CI.yml | 4 +++-
docs/contributors/environment.rst | 14 +++++++-------
docs/contributors/workflow.rst | 2 +-
docs/faq.rst | 2 +-
tests/deploy.sh | 2 +-
towncrier/newsfragments/1828.misc | 1 +
6 files changed, 14 insertions(+), 11 deletions(-)
create mode 100644 towncrier/newsfragments/1828.misc
diff --git a/.github/worfklows/CI.yml b/.github/worfklows/CI.yml
index 3dcaf096..421fec3c 100644
--- a/.github/worfklows/CI.yml
+++ b/.github/worfklows/CI.yml
@@ -26,7 +26,8 @@ on:
# Password of docker login for pushing the images to repo $DOCKER_ORG
# DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
# The docker repository where the images are pushed to.
-#
+# DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
+# The docker repository for test images. Only used for the branch TESTING (BORS try).
# Add the above secrets to your github repo to determine where the images will be pushed.
################################################
@@ -256,6 +257,7 @@ jobs:
DOCKER_UN: ${{ secrets.Docker_Login }}
DOCKER_PW: ${{ secrets.Docker_Password }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
TRAVIS_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
diff --git a/docs/contributors/environment.rst b/docs/contributors/environment.rst
index 26c04d0b..cef71c6c 100644
--- a/docs/contributors/environment.rst
+++ b/docs/contributors/environment.rst
@@ -178,9 +178,9 @@ In the case of a PR from a fellow team member, a single review is enough
to initiate merging. In all other cases, two approving reviews are required.
There is also a possibility to set the ``review/need2`` to require a second review.
-After Travis successfully tests the PR and the required amount of reviews are acquired,
+After the Github Action workflow successfully tests the PR and the required amount of reviews are acquired,
Mergify will trigger with a ``bors r+`` command. Bors will batch any approved PR's,
-merges them with master in a staging branch where Travis builds and tests the result.
+merges them with master in a staging branch where the Github Action workflow builds and tests the result.
After a successful test, the actual master gets fast-forwarded to that point.
System requirements
@@ -201,16 +201,16 @@ us on `Matrix`_.
Test images
```````````
-All PR's automatically get build by Travis, controlled by `bors-ng`_.
+All PR's automatically get built by a Github Action workflow, controlled by `bors-ng`_.
Some primitive auto testing is done.
The resulting images get uploaded to Docker hub, under the
-tag name ``mailutest/:pr-``.
+tag name ``mailuci/:pr-``.
For example, to test PR #500 against master, reviewers can use:
.. code-block:: bash
- export DOCKER_ORG="mailutest"
+ export DOCKER_ORG="mailuci"
export MAILU_VERSION="pr-500"
docker-compose pull
docker-compose up -d
@@ -232,8 +232,8 @@ after Bors confirms a successful build.
When bors try fails
```````````````````
-Sometimes Travis fails when another PR triggers a ``bors try`` command,
-before Travis cloned the git repository.
+Sometimes the Github Action workflow fails when another PR triggers a ``bors try`` command,
+before the Github Action workflow cloned the git repository.
Inspect the build log in the link provided by *bors-ng* to find out the cause.
If you see something like the following error on top of the logs,
feel free to write a comment with ``bors retry``.
diff --git a/docs/contributors/workflow.rst b/docs/contributors/workflow.rst
index 16dcef52..31ffd793 100644
--- a/docs/contributors/workflow.rst
+++ b/docs/contributors/workflow.rst
@@ -41,7 +41,7 @@ PR Workflow
-----------
All pull requests have to be against the main ``master`` branch.
-The PR gets build by Travis and some primitive auto-testing is done.
+The PR gets built by a Github Action workflow and some primitive auto-testing is done.
Test images get uploaded to a separate section in Docker hub.
Reviewers will check the PR and test the resulting images.
See the :ref:`testing` section for more info.
diff --git a/docs/faq.rst b/docs/faq.rst
index 9c4f1d75..c4cea444 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -61,7 +61,7 @@ have to prevent pushing out something quickly.
We currently maintain a strict work flow:
#. Someone writes a solution and sends a pull request;
-#. We use Travis-CI for some very basic building and testing;
+#. We use Github actions for some very basic building and testing;
#. The pull request needs to be code-reviewed and tested by at least two members
from the contributors team.
diff --git a/tests/deploy.sh b/tests/deploy.sh
index 21aec444..a836417b 100755
--- a/tests/deploy.sh
+++ b/tests/deploy.sh
@@ -5,7 +5,7 @@
# Retag in case of `bors try`
if [ "$TRAVIS_BRANCH" = "testing" ]; then
- export DOCKER_ORG="mailutest"
+ export DOCKER_ORG=$DOCKER_ORG_TESTS
# Commit message is like "Try #99".
# This sets the version tag to "pr-99"
export MAILU_VERSION="pr-${TRAVIS_COMMIT_MESSAGE//[!0-9]/}"
diff --git a/towncrier/newsfragments/1828.misc b/towncrier/newsfragments/1828.misc
new file mode 100644
index 00000000..09da59ad
--- /dev/null
+++ b/towncrier/newsfragments/1828.misc
@@ -0,0 +1 @@
+Switched from Travis to Github actions for CI/CD. Improved CI workflow to perform all tests in parallel.
From c6a38bbbccafd8e2623645da6c5759c9a23b4382 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Sat, 26 Jun 2021 21:50:55 +0200
Subject: [PATCH 099/181] Update CI.yml
---
.github/worfklows/CI.yml | 2 --
1 file changed, 2 deletions(-)
diff --git a/.github/worfklows/CI.yml b/.github/worfklows/CI.yml
index 421fec3c..e0462e1e 100644
--- a/.github/worfklows/CI.yml
+++ b/.github/worfklows/CI.yml
@@ -1,5 +1,3 @@
-
-
name: CI
on:
push:
From fb30a62629b6372450b34b6c9a7238d40013ef58 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Sat, 26 Jun 2021 21:52:09 +0200
Subject: [PATCH 100/181] Create CI.yml
---
.github/workflows/CI.yml | 276 +++++++++++++++++++++++++++++++++++++++
1 file changed, 276 insertions(+)
create mode 100644 .github/workflows/CI.yml
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
new file mode 100644
index 00000000..e0462e1e
--- /dev/null
+++ b/.github/workflows/CI.yml
@@ -0,0 +1,276 @@
+name: CI
+on:
+ push:
+ branches:
+ - staging
+ - testing
+ - '1.5'
+ - '1.6'
+ - '1.7'
+ - '1.8'
+ - master
+ # version tags, e.g. 1.7.1
+ - '[1-9].[0-9].[0-9]'
+ # pre-releases, e.g. 1.8-pre1
+ - 1.8-pre[0-9]
+ # test branches, e.g. test-debian
+ - test-*
+
+###############################################
+# REQUIRED secrets
+# DOCKER_UN: ${{ secrets.Docker_Login }}
+# Username of docker login for pushing the images to repo $DOCKER_ORG
+# DOCKER_PW: ${{ secrets.Docker_Password }}
+# Password of docker login for pushing the images to repo $DOCKER_ORG
+# DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+# The docker repository where the images are pushed to.
+# DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
+# The docker repository for test images. Only used for the branch TESTING (BORS try).
+# Add the above secrets to your github repo to determine where the images will be pushed.
+################################################
+
+jobs:
+ build:
+ name: build
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: check docker-compose version
+ run: docker-compose -v
+ - name: login docker
+ env:
+ DOCKER_UN: ${{ secrets.Docker_Login }}
+ DOCKER_PW: ${{ secrets.Docker_Password }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
+ # In this step, this action saves a list of existing images,
+ # the cache is created without them in the post run.
+ # It also restores the cache if it exists.
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: build all docker images
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ run: docker-compose -f tests/build.yml build
+
+#NOTE: It appears the filter test depends on the core test. The filter test requires an email user
+#that is created by the core test.
+ core-test:
+ name: core test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test core suite
+ run: python tests/compose/test.py core 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ filter-test:
+ name: filter test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: 'test clamvav'
+ run: python tests/compose/test.py filters 2
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ fetch-test:
+ name: fetch test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test fetch
+ run: python tests/compose/test.py fetchmail 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ rainloop-test:
+ name: rainloop test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test rainloop
+ run: python tests/compose/test.py rainloop 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ roundcube-test:
+ name: roundcube test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test roundcube
+ run: python tests/compose/test.py roundcube 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ webdav-test:
+ name: webdav test
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: test webdav
+ run: python tests/compose/test.py webdav 1
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ deploy:
+ name: deploy step
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ - core-test
+ - filter-test
+ - fetch-test
+ - rainloop-test
+ - roundcube-test
+ - webdav-test
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ # Ignore the failure of a step and avoid terminating the job.
+ continue-on-error: true
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: login docker
+ env:
+ DOCKER_UN: ${{ secrets.Docker_Login }}
+ DOCKER_PW: ${{ secrets.Docker_Password }}
+ run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
+ - name: build all docker images
+ run: docker-compose -f tests/build.yml build
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ - name: deploy built docker images
+ env:
+ DOCKER_UN: ${{ secrets.Docker_Login }}
+ DOCKER_PW: ${{ secrets.Docker_Password }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+ DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ TRAVIS_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: bash tests/deploy.sh
+
+ # This job is watched by bors. It only completes if building, testing and deploy worked.
+ ci-success:
+ name: CI-Done
+ #Returns true when none of the **previous** steps have failed or been canceled.
+ if: ${{ success() }}
+ needs:
+ - deploy
+ runs-on: ubuntu-latest
+ steps:
+ - name: CI/CD succeeded.
+ run: exit 0
+
+
From 006da4c5e409526ccf5fb3e62ac512d3d5e79593 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Sat, 26 Jun 2021 21:52:51 +0200
Subject: [PATCH 101/181] My mistake. A typo
---
.github/worfklows/CI.yml | 276 ---------------------------------------
1 file changed, 276 deletions(-)
delete mode 100644 .github/worfklows/CI.yml
diff --git a/.github/worfklows/CI.yml b/.github/worfklows/CI.yml
deleted file mode 100644
index e0462e1e..00000000
--- a/.github/worfklows/CI.yml
+++ /dev/null
@@ -1,276 +0,0 @@
-name: CI
-on:
- push:
- branches:
- - staging
- - testing
- - '1.5'
- - '1.6'
- - '1.7'
- - '1.8'
- - master
- # version tags, e.g. 1.7.1
- - '[1-9].[0-9].[0-9]'
- # pre-releases, e.g. 1.8-pre1
- - 1.8-pre[0-9]
- # test branches, e.g. test-debian
- - test-*
-
-###############################################
-# REQUIRED secrets
-# DOCKER_UN: ${{ secrets.Docker_Login }}
-# Username of docker login for pushing the images to repo $DOCKER_ORG
-# DOCKER_PW: ${{ secrets.Docker_Password }}
-# Password of docker login for pushing the images to repo $DOCKER_ORG
-# DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-# The docker repository where the images are pushed to.
-# DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
-# The docker repository for test images. Only used for the branch TESTING (BORS try).
-# Add the above secrets to your github repo to determine where the images will be pushed.
-################################################
-
-jobs:
- build:
- name: build
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - name: check docker-compose version
- run: docker-compose -v
- - name: login docker
- env:
- DOCKER_UN: ${{ secrets.Docker_Login }}
- DOCKER_PW: ${{ secrets.Docker_Password }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- # In this step, this action saves a list of existing images,
- # the cache is created without them in the post run.
- # It also restores the cache if it exists.
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: build all docker images
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- run: docker-compose -f tests/build.yml build
-
-#NOTE: It appears the filter test depends on the core test. The filter test requires an email user
-#that is created by the core test.
- core-test:
- name: core test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test core suite
- run: python tests/compose/test.py core 1
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- filter-test:
- name: filter test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: 'test clamvav'
- run: python tests/compose/test.py filters 2
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- fetch-test:
- name: fetch test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test fetch
- run: python tests/compose/test.py fetchmail 1
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- rainloop-test:
- name: rainloop test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test rainloop
- run: python tests/compose/test.py rainloop 1
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- roundcube-test:
- name: roundcube test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test roundcube
- run: python tests/compose/test.py roundcube 1
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- webdav-test:
- name: webdav test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test webdav
- run: python tests/compose/test.py webdav 1
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- deploy:
- name: deploy step
- runs-on: ubuntu-latest
- needs:
- - build
- - core-test
- - filter-test
- - fetch-test
- - rainloop-test
- - roundcube-test
- - webdav-test
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: login docker
- env:
- DOCKER_UN: ${{ secrets.Docker_Login }}
- DOCKER_PW: ${{ secrets.Docker_Password }}
- run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- - name: build all docker images
- run: docker-compose -f tests/build.yml build
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- - name: deploy built docker images
- env:
- DOCKER_UN: ${{ secrets.Docker_Login }}
- DOCKER_PW: ${{ secrets.Docker_Password }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- TRAVIS_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
- run: bash tests/deploy.sh
-
- # This job is watched by bors. It only complets if building,testing and deploy worked.
- ci-success:
- name: CI-Done
- #Returns true when none of the **previous** steps have failed or been canceled.
- if: ${{ success() }}
- needs:
- - deploy
- runs-on: ubuntu-latest
- steps:
- - name: CI/CD succeeded.
- run: exit 0
-
-
From 606c039a6ffbcb63bb28bb1b422d0e622ebc0088 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sat, 26 Jun 2021 21:00:51 +0000
Subject: [PATCH 102/181] Switch back to sequential workflow
---
.github/workflows/CI.yml | 126 ++---------------------
tests/compose/filters/00_create_users.sh | 5 -
2 files changed, 6 insertions(+), 125 deletions(-)
delete mode 100755 tests/compose/filters/00_create_users.sh
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index e0462e1e..8cd9a8d3 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -30,8 +30,8 @@ on:
################################################
jobs:
- build:
- name: build
+ build-test:
+ name: build and test
runs-on: ubuntu-latest
steps:
@@ -63,24 +63,6 @@ jobs:
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
run: docker-compose -f tests/build.yml build
-#NOTE: It appears the filter test depends on the core test. The filter test requires an email user
-#that is created by the core test.
- core-test:
- name: core test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- name: copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test core suite
@@ -90,74 +72,20 @@ jobs:
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- filter-test:
- name: filter test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: 'test clamvav'
run: python tests/compose/test.py filters 2
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- fetch-test:
- name: fetch test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+
- name: test fetch
run: python tests/compose/test.py fetchmail 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- rainloop-test:
- name: rainloop test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+
- name: test rainloop
run: python tests/compose/test.py rainloop 1
env:
@@ -165,49 +93,13 @@ jobs:
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- roundcube-test:
- name: roundcube test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test roundcube
run: python tests/compose/test.py roundcube 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- webdav-test:
- name: webdav test
- runs-on: ubuntu-latest
- needs:
- - build
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+
- name: test webdav
run: python tests/compose/test.py webdav 1
env:
@@ -219,13 +111,7 @@ jobs:
name: deploy step
runs-on: ubuntu-latest
needs:
- - build
- - core-test
- - filter-test
- - fetch-test
- - rainloop-test
- - roundcube-test
- - webdav-test
+ - build-test
steps:
- uses: actions/checkout@v2
- name: Extract branch name
diff --git a/tests/compose/filters/00_create_users.sh b/tests/compose/filters/00_create_users.sh
deleted file mode 100755
index 3c581685..00000000
--- a/tests/compose/filters/00_create_users.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-echo "Creating user required for next test ..."
-# Should not fail and update the password; update mode
-docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu admin admin mailu.io 'password' --mode=update || exit 1
-docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu user user mailu.io 'password' || exit 1
-echo "User created successfully"
From 24200ddb670cf500d83695241bb158a4324e94b7 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sat, 26 Jun 2021 21:49:37 +0000
Subject: [PATCH 103/181] Forgot to remove duplicate steps when switching back
to sequential workflow
---
.github/workflows/CI.yml | 35 +++--------------------------------
1 file changed, 3 insertions(+), 32 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 8cd9a8d3..71897c52 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -30,7 +30,7 @@ on:
################################################
jobs:
- build-test:
+ build-test-deploy:
name: build and test
runs-on: ubuntu-latest
@@ -106,36 +106,7 @@ jobs:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
- deploy:
- name: deploy step
- runs-on: ubuntu-latest
- needs:
- - build-test
- steps:
- - uses: actions/checkout@v2
- - name: Extract branch name
- shell: bash
- run: |
- echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
- - name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: login docker
- env:
- DOCKER_UN: ${{ secrets.Docker_Login }}
- DOCKER_PW: ${{ secrets.Docker_Password }}
- run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- - name: build all docker images
- run: docker-compose -f tests/build.yml build
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
- name: deploy built docker images
env:
DOCKER_UN: ${{ secrets.Docker_Login }}
@@ -153,7 +124,7 @@ jobs:
#Returns true when none of the **previous** steps have failed or been canceled.
if: ${{ success() }}
needs:
- - deploy
+ - build-test-deploy
runs-on: ubuntu-latest
steps:
- name: CI/CD succeeded.
From b560d1f36998c4c2f65875cbb88f8fbc319ffbe2 Mon Sep 17 00:00:00 2001
From: Nicolas Paris
Date: Sun, 27 Jun 2021 10:38:32 +0200
Subject: [PATCH 104/181] Improve english
Co-authored-by: decentral1se <1991377+decentral1se@users.noreply.github.com>
---
docs/database.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/database.rst b/docs/database.rst
index c13ca0bf..0f8318d5 100644
--- a/docs/database.rst
+++ b/docs/database.rst
@@ -8,8 +8,8 @@ This functionality should still be considered experimental!
Mailu Postgresql
----------------
-Mailu optionally comes with a pre-configured Postgresql image, wich as of 1.8 is deprecated
-will be removed in 1.9
+Mailu optionally comes with a pre-configured Postgresql image, which as of 1.8, is deprecated
+and will be removed in 1.9.
This images has the following features:
- Automatic creation of users, db, extensions and password;
From c0c8c4a55113237274ca4b4e32c9c46e5297998f Mon Sep 17 00:00:00 2001
From: Nicolas Paris
Date: Sun, 27 Jun 2021 10:46:28 +0200
Subject: [PATCH 105/181] Fix typo
Co-authored-by: decentral1se <1991377+decentral1se@users.noreply.github.com>
---
setup/templates/steps/database.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup/templates/steps/database.html b/setup/templates/steps/database.html
index d7184110..2727687f 100644
--- a/setup/templates/steps/database.html
+++ b/setup/templates/steps/database.html
@@ -46,7 +46,7 @@
-
+
From ab7264df0c9a115a7e9cf63cd4ff55e380c3e7f7 Mon Sep 17 00:00:00 2001
From: Nicolas Paris
Date: Sun, 27 Jun 2021 10:46:41 +0200
Subject: [PATCH 106/181] Fix typo
Co-authored-by: decentral1se <1991377+decentral1se@users.noreply.github.com>
---
setup/templates/steps/database.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup/templates/steps/database.html b/setup/templates/steps/database.html
index 2727687f..be32fe0b 100644
--- a/setup/templates/steps/database.html
+++ b/setup/templates/steps/database.html
@@ -44,7 +44,7 @@
-
+
From 7386257dedda678419e495c28e382251f7212bfa Mon Sep 17 00:00:00 2001
From: Nicolas Paris
Date: Sun, 27 Jun 2021 10:47:28 +0200
Subject: [PATCH 107/181] Fix typo
Co-authored-by: decentral1se <1991377+decentral1se@users.noreply.github.com>
---
setup/templates/steps/database.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup/templates/steps/database.html b/setup/templates/steps/database.html
index be32fe0b..5e942061 100644
--- a/setup/templates/steps/database.html
+++ b/setup/templates/steps/database.html
@@ -28,7 +28,7 @@
-
+
From e16e9f19fdff9d068f3d640e9a3818513461fb60 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 08:54:17 +0000
Subject: [PATCH 109/181] Run test jobs in parallel for CI/CD.
---
.github/workflows/CI.yml | 213 ++++++++++++++++++++---
tests/compose/filters/00_create_users.sh | 5 +
2 files changed, 190 insertions(+), 28 deletions(-)
create mode 100755 tests/compose/filters/00_create_users.sh
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 71897c52..56cced1b 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -25,23 +25,27 @@ on:
# DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
# The docker repository where the images are pushed to.
# DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
-# The docker repository for test images. Only used for the branch TESTING (BORS try).
+# The docker repository for test images. Only used for the branch TESTING (BORS try).
# Add the above secrets to your github repo to determine where the images will be pushed.
################################################
jobs:
- build-test-deploy:
- name: build and test
+ build:
+ name: build images
runs-on: ubuntu-latest
-
steps:
- uses: actions/checkout@v2
- name: Extract branch name
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: install python packages
- run: python3 -m pip install -r tests/requirements.txt
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- name: check docker-compose version
run: docker-compose -v
- name: login docker
@@ -49,22 +53,39 @@ jobs:
DOCKER_UN: ${{ secrets.Docker_Login }}
DOCKER_PW: ${{ secrets.Docker_Password }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- # In this step, this action saves a list of existing images,
- # the cache is created without them in the post run.
- # It also restores the cache if it exists.
- - uses: satackey/action-docker-layer-caching@v0.0.11
- # Ignore the failure of a step and avoid terminating the job.
- continue-on-error: true
+ run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- name: build all docker images
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
run: docker-compose -f tests/build.yml build
+ - name: save all docker images
+ run: docker save ${{ secrets.DOCKER_ORG }}/admin ${{ secrets.DOCKER_ORG }}/clamav ${{ secrets.DOCKER_ORG }}/docs ${{ secrets.DOCKER_ORG }}/dovecot ${{ secrets.DOCKER_ORG }}/fetchmail ${{ secrets.DOCKER_ORG }}/nginx ${{ secrets.DOCKER_ORG }}/none ${{ secrets.DOCKER_ORG }}/postfix ${{ secrets.DOCKER_ORG }}/postgresql ${{ secrets.DOCKER_ORG }}/radicale ${{ secrets.DOCKER_ORG }}/rainloop ${{ secrets.DOCKER_ORG }}/roundcube ${{ secrets.DOCKER_ORG }}/rspamd ${{ secrets.DOCKER_ORG }}/setup ${{ secrets.DOCKER_ORG }}/traefik-certdumper ${{ secrets.DOCKER_ORG }}/unbound -o /images/images.tar.gz
+
+ test-core:
+ name: Perform core tests
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
- name: copy all certs
- run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test core suite
run: python tests/compose/test.py core 1
env:
@@ -72,41 +93,179 @@ jobs:
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- - name: 'test clamvav'
- run: python tests/compose/test.py filters 2
- env:
- MAILU_VERSION: ${{ env.BRANCH }}
- TRAVIS_BRANCH: ${{ env.BRANCH }}
- DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
+ test-fetchmail:
+ name: Perform fetchmail tests
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test fetch
run: python tests/compose/test.py fetchmail 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
+
+ test-filters:
+ name: Perform filter tests
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
+ - name: 'test clamvav'
+ run: python tests/compose/test.py filters 2
+ env:
+ MAILU_VERSION: ${{ env.BRANCH }}
+ TRAVIS_BRANCH: ${{ env.BRANCH }}
+ DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
+
+ test-rainloop:
+ name: Perform rainloop tests
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test rainloop
run: python tests/compose/test.py rainloop 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
+
+ test-roundcube:
+ name: Perform roundcube tests
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test roundcube
run: python tests/compose/test.py roundcube 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
+
+ test-webdav:
+ name: Perform webdav tests
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: install python packages
+ run: python3 -m pip install -r tests/requirements.txt
+ - name: copy all certs
+ run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: test webdav
run: python tests/compose/test.py webdav 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
-
+
+ deploy:
+ name: Deploy images
+ runs-on: ubuntu-latest
+ needs:
+ - build
+ - test-core
+ - test-fetchmail
+ - test-filters
+ - test-rainloop
+ - test-roundcube
+ - test-webdav
+ steps:
+ - uses: actions/checkout@v2
+ - name: Extract branch name
+ shell: bash
+ run: |
+ echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: create folder for storing images
+ run: sudo mkdir -p /images
+ - name: configure images folder for caching
+ # For staging we do not deploy images. So we do not have to load them from cache.
+ if: ${{ env.BRANCH != 'staging' }}
+ using: actions/cache@v2
+ with:
+ path: /images
+ key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: deploy built docker images
env:
DOCKER_UN: ${{ secrets.Docker_Login }}
@@ -124,10 +283,8 @@ jobs:
#Returns true when none of the **previous** steps have failed or been canceled.
if: ${{ success() }}
needs:
- - build-test-deploy
+ - deploy
runs-on: ubuntu-latest
steps:
- name: CI/CD succeeded.
run: exit 0
-
-
diff --git a/tests/compose/filters/00_create_users.sh b/tests/compose/filters/00_create_users.sh
new file mode 100755
index 00000000..3c581685
--- /dev/null
+++ b/tests/compose/filters/00_create_users.sh
@@ -0,0 +1,5 @@
+echo "Creating user required for next test ..."
+# Should not fail and update the password; update mode
+docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu admin admin mailu.io 'password' --mode=update || exit 1
+docker-compose -f tests/compose/filters/docker-compose.yml exec -T admin flask mailu user user mailu.io 'password' || exit 1
+echo "User created successfully"
From a6ec14b42a7deb3af3c15a5530e9b50e18c6cadb Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 08:56:59 +0000
Subject: [PATCH 110/181] Fixed spacing in CI.yml
---
.github/workflows/CI.yml | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 56cced1b..da770e28 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -63,7 +63,6 @@ jobs:
- name: save all docker images
run: docker save ${{ secrets.DOCKER_ORG }}/admin ${{ secrets.DOCKER_ORG }}/clamav ${{ secrets.DOCKER_ORG }}/docs ${{ secrets.DOCKER_ORG }}/dovecot ${{ secrets.DOCKER_ORG }}/fetchmail ${{ secrets.DOCKER_ORG }}/nginx ${{ secrets.DOCKER_ORG }}/none ${{ secrets.DOCKER_ORG }}/postfix ${{ secrets.DOCKER_ORG }}/postgresql ${{ secrets.DOCKER_ORG }}/radicale ${{ secrets.DOCKER_ORG }}/rainloop ${{ secrets.DOCKER_ORG }}/roundcube ${{ secrets.DOCKER_ORG }}/rspamd ${{ secrets.DOCKER_ORG }}/setup ${{ secrets.DOCKER_ORG }}/traefik-certdumper ${{ secrets.DOCKER_ORG }}/unbound -o /images/images.tar.gz
-
test-core:
name: Perform core tests
runs-on: ubuntu-latest
@@ -93,7 +92,7 @@ jobs:
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
- test-fetchmail:
+ test-fetchmail:
name: Perform fetchmail tests
runs-on: ubuntu-latest
needs:
From 2f51fe668869dbaecf3abb39b85b8707a98ffc6c Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 09:07:41 +0000
Subject: [PATCH 111/181] using != uses
---
.github/workflows/CI.yml | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index da770e28..cf22317b 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -42,7 +42,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -77,7 +77,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -106,7 +106,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -135,7 +135,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -164,7 +164,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -193,7 +193,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -222,7 +222,7 @@ jobs:
- name: create folder for storing images
run: sudo mkdir -p /images
- name: configure images folder for caching
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
@@ -259,7 +259,7 @@ jobs:
- name: configure images folder for caching
# For staging we do not deploy images. So we do not have to load them from cache.
if: ${{ env.BRANCH != 'staging' }}
- using: actions/cache@v2
+ uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
From 0468fb20649b25867ae1e429629beba9c36afbb9 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 09:24:17 +0000
Subject: [PATCH 112/181] Forgot to set permissions on images folder. Added
changelog.
---
.github/workflows/CI.yml | 88 ++++++++++++++++---------------
towncrier/newsfragments/1830.misc | 1 +
2 files changed, 47 insertions(+), 42 deletions(-)
create mode 100644 towncrier/newsfragments/1830.misc
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index cf22317b..c2c3413d 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -31,7 +31,7 @@ on:
jobs:
build:
- name: build images
+ name: Build images
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@@ -39,28 +39,30 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
- run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Create folder for storing images
+ run: |
+ sudo mkdir -p /images
+ chmod sudo chmod 777 /images
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: check docker-compose version
+ - name: Check docker-compose version
run: docker-compose -v
- - name: login docker
+ - name: Login docker
env:
DOCKER_UN: ${{ secrets.Docker_Login }}
DOCKER_PW: ${{ secrets.Docker_Password }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- - name: build all docker images
+ - name: Build all docker images
env:
MAILU_VERSION: ${{ env.BRANCH }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
run: docker-compose -f tests/build.yml build
- - name: save all docker images
+ - name: Save all docker images
run: docker save ${{ secrets.DOCKER_ORG }}/admin ${{ secrets.DOCKER_ORG }}/clamav ${{ secrets.DOCKER_ORG }}/docs ${{ secrets.DOCKER_ORG }}/dovecot ${{ secrets.DOCKER_ORG }}/fetchmail ${{ secrets.DOCKER_ORG }}/nginx ${{ secrets.DOCKER_ORG }}/none ${{ secrets.DOCKER_ORG }}/postfix ${{ secrets.DOCKER_ORG }}/postgresql ${{ secrets.DOCKER_ORG }}/radicale ${{ secrets.DOCKER_ORG }}/rainloop ${{ secrets.DOCKER_ORG }}/roundcube ${{ secrets.DOCKER_ORG }}/rspamd ${{ secrets.DOCKER_ORG }}/setup ${{ secrets.DOCKER_ORG }}/traefik-certdumper ${{ secrets.DOCKER_ORG }}/unbound -o /images/images.tar.gz
test-core:
@@ -74,18 +76,18 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
+ - name: Create folder for storing images
run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: install python packages
+ - name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- - name: copy all certs
+ - name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test core suite
+ - name: Test core suite
run: python tests/compose/test.py core 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
@@ -103,18 +105,18 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
+ - name: Create folder for storing images
run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: install python packages
+ - name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- - name: copy all certs
+ - name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test fetch
+ - name: Test fetch
run: python tests/compose/test.py fetchmail 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
@@ -132,18 +134,18 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
+ - name: Create folder for storing images
run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: install python packages
+ - name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- - name: copy all certs
+ - name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: 'test clamvav'
+ - name: Test clamvav
run: python tests/compose/test.py filters 2
env:
MAILU_VERSION: ${{ env.BRANCH }}
@@ -161,18 +163,18 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
+ - name: Create folder for storing images
run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: install python packages
+ - name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- - name: copy all certs
+ - name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test rainloop
+ - name: Test rainloop
run: python tests/compose/test.py rainloop 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
@@ -190,18 +192,18 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
+ - name: Create folder for storing images
run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: install python packages
+ - name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- - name: copy all certs
+ - name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test roundcube
+ - name: Test roundcube
run: python tests/compose/test.py roundcube 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
@@ -219,18 +221,18 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
+ - name: Create folder for storing images
run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Configure images folder for caching
uses: actions/cache@v2
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- - name: install python packages
+ - name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- - name: copy all certs
+ - name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- - name: test webdav
+ - name: Test webdav
run: python tests/compose/test.py webdav 1
env:
MAILU_VERSION: ${{ env.BRANCH }}
@@ -254,9 +256,11 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- - name: create folder for storing images
- run: sudo mkdir -p /images
- - name: configure images folder for caching
+ - name: Create folder for storing images
+ run: |
+ sudo mkdir -p /images
+ chmod sudo chmod 777 /images
+ - name: Configure images folder for caching
# For staging we do not deploy images. So we do not have to load them from cache.
if: ${{ env.BRANCH != 'staging' }}
uses: actions/cache@v2
@@ -265,7 +269,7 @@ jobs:
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- name: Load docker images
run: docker load -i /images/images.tar.gz
- - name: deploy built docker images
+ - name: Deploy built docker images
env:
DOCKER_UN: ${{ secrets.Docker_Login }}
DOCKER_PW: ${{ secrets.Docker_Password }}
diff --git a/towncrier/newsfragments/1830.misc b/towncrier/newsfragments/1830.misc
new file mode 100644
index 00000000..6de3aff1
--- /dev/null
+++ b/towncrier/newsfragments/1830.misc
@@ -0,0 +1 @@
+Make CI tests run in parallel.
From 782ffc084f49828007307e2dec367b6d705efe16 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 09:26:11 +0000
Subject: [PATCH 113/181] Fixed typo
---
.github/workflows/CI.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index c2c3413d..9027ec10 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -42,7 +42,7 @@ jobs:
- name: Create folder for storing images
run: |
sudo mkdir -p /images
- chmod sudo chmod 777 /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
@@ -259,7 +259,7 @@ jobs:
- name: Create folder for storing images
run: |
sudo mkdir -p /images
- chmod sudo chmod 777 /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
# For staging we do not deploy images. So we do not have to load them from cache.
if: ${{ env.BRANCH != 'staging' }}
From c6da021106012164299e23d751326c9a6bc9ad01 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 09:31:39 +0000
Subject: [PATCH 114/181] Forgot to adapt all create folder steps
---
.github/workflows/CI.yml | 24 ++++++++++++++++++------
1 file changed, 18 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 9027ec10..278d0ab8 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -77,7 +77,9 @@ jobs:
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- name: Create folder for storing images
- run: sudo mkdir -p /images
+ run: |
+ sudo mkdir -p /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
@@ -106,7 +108,9 @@ jobs:
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- name: Create folder for storing images
- run: sudo mkdir -p /images
+ run: |
+ sudo mkdir -p /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
@@ -135,7 +139,9 @@ jobs:
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- name: Create folder for storing images
- run: sudo mkdir -p /images
+ run: |
+ sudo mkdir -p /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
@@ -164,7 +170,9 @@ jobs:
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- name: Create folder for storing images
- run: sudo mkdir -p /images
+ run: |
+ sudo mkdir -p /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
@@ -193,7 +201,9 @@ jobs:
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- name: Create folder for storing images
- run: sudo mkdir -p /images
+ run: |
+ sudo mkdir -p /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
@@ -222,7 +232,9 @@ jobs:
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
- name: Create folder for storing images
- run: sudo mkdir -p /images
+ run: |
+ sudo mkdir -p /images
+ sudo chmod 777 /images
- name: Configure images folder for caching
uses: actions/cache@v2
with:
From c2b1f23652716ab011b7be5910a1cde0c788e17e Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Sun, 27 Jun 2021 09:44:38 +0000
Subject: [PATCH 115/181] It helps to also load the docker images for the
tests.
---
.github/workflows/CI.yml | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 278d0ab8..5acb5c5f 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -85,6 +85,8 @@ jobs:
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
@@ -116,6 +118,8 @@ jobs:
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
@@ -147,6 +151,8 @@ jobs:
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
@@ -178,6 +184,8 @@ jobs:
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
@@ -209,6 +217,8 @@ jobs:
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
@@ -240,6 +250,8 @@ jobs:
with:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
+ - name: Load docker images
+ run: docker load -i /images/images.tar.gz
- name: Install python packages
run: python3 -m pip install -r tests/requirements.txt
- name: Copy all certs
From 75ee2cd1f7f79d1ff0fc9e1e3db95b4310961b2c Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Mon, 28 Jun 2021 22:38:01 +0200
Subject: [PATCH 116/181] Added manual trigger for workflow
The workflow file only triggers when it is actually present in the branch being built. If an old PR that predates CI.yml is tested, the workflow run will not start, because the merged commit on the TESTING/STAGING branch does not contain the required CI.yml workflow file. In those cases, simply run the workflow manually on the TESTING or STAGING branch.
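Editor's note: a minimal sketch of the resulting trigger section, assuming an abbreviated branch list (the real workflow keeps all release and test branches):

    on:
      # allow maintainers to start the workflow by hand from the Actions tab
      workflow_dispatch:
      push:
        branches:
          - staging
          - testing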
---
.github/workflows/CI.yml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 71897c52..8b7d1e01 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -1,5 +1,10 @@
name: CI
on:
+#NOTE: The workflow will ONLY trigger when the branch actually contains the CI.yml workflow file.
+#So if a PR is tested on STAGING/TESTING branch by BORS without this file present, then the workflow
+#will NOT trigger. For these situations, manually start the workflow. This should be resolved once all
+#old PRs without the CI.yml workflow file have been merged.
+ workflow_dispatch:
push:
branches:
- staging
From b2840fed263e28d9c2c944dcff57f8f5c007e58c Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Mon, 28 Jun 2021 22:51:23 +0200
Subject: [PATCH 117/181] Update CI.yml
---
.github/workflows/CI.yml | 5 -----
1 file changed, 5 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index c1c8d15d..5acb5c5f 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -1,10 +1,5 @@
name: CI
on:
-#NOTE: The workflow will ONLY trigger when the branch actually contains the CI.yml workflow file.
-#So if a PR is tested on STAGING/TESTING branch by BORS without this file present, then the workflow
-#will NOT trigger. For these situations, manually start the workflow. This should be resolved once all
-#old PRs without the CI.yml workflow file have been merged.
- workflow_dispatch:
push:
branches:
- staging
From 1566dfb077f0b0858fdb597caab4b8dc58552204 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Tue, 29 Jun 2021 08:58:44 +0200
Subject: [PATCH 118/181] Forgot to add condition to load docker images step
---
.github/workflows/CI.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 5acb5c5f..2b81a60d 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -292,6 +292,7 @@ jobs:
path: /images
key: ${{ env.BRANCH }}-${{ github.run_id }}-${{ github.run_number }}
- name: Load docker images
+ if: ${{ env.BRANCH != 'staging' }}
run: docker load -i /images/images.tar.gz
- name: Deploy built docker images
env:
From 8998e21f7b20720eea454fd3f7482c5236611967 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Tue, 29 Jun 2021 07:59:24 +0000
Subject: [PATCH 119/181] Set static hostname for antispam to preserve history.
See #561
---
setup/flavors/compose/docker-compose.yml | 1 +
setup/flavors/stack/docker-compose.yml | 1 +
2 files changed, 2 insertions(+)
diff --git a/setup/flavors/compose/docker-compose.yml b/setup/flavors/compose/docker-compose.yml
index 155e1180..08bba13b 100644
--- a/setup/flavors/compose/docker-compose.yml
+++ b/setup/flavors/compose/docker-compose.yml
@@ -85,6 +85,7 @@ services:
antispam:
image: ${DOCKER_ORG:-mailu}/${DOCKER_PREFIX:-}rspamd:${MAILU_VERSION:-{{ version }}}
+ hostname: antispam
restart: always
env_file: {{ env }}
volumes:
diff --git a/setup/flavors/stack/docker-compose.yml b/setup/flavors/stack/docker-compose.yml
index d9c5cd4f..df1fe7b4 100644
--- a/setup/flavors/stack/docker-compose.yml
+++ b/setup/flavors/stack/docker-compose.yml
@@ -70,6 +70,7 @@ services:
antispam:
image: ${DOCKER_ORG:-mailu}/${DOCKER_PREFIX:-}rspamd:${MAILU_VERSION:-{{ version }}}
+ hostname: antispam
env_file: {{ env }}
volumes:
- "{{ root }}/filter:/var/lib/rspamd"
From ef5741ef8053deca23e9d0410453623638a7de5f Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Tue, 29 Jun 2021 08:07:43 +0000
Subject: [PATCH 120/181] Add newsfragment
---
towncrier/newsfragments/1837.bugfix | 1 +
1 file changed, 1 insertion(+)
create mode 100644 towncrier/newsfragments/1837.bugfix
diff --git a/towncrier/newsfragments/1837.bugfix b/towncrier/newsfragments/1837.bugfix
new file mode 100644
index 00000000..dcabcc6b
--- /dev/null
+++ b/towncrier/newsfragments/1837.bugfix
@@ -0,0 +1 @@
+Antispam service now uses a static hostname. Rspamd history is only retained when the service has a fixed hostname.
From 52005cf4e91d62f5c34824995be0bb61c3024097 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Tue, 29 Jun 2021 13:57:49 +0200
Subject: [PATCH 121/181] Update list of trusted authors.
Now the list of trusted authors reflects the contributors group again.
---
.mergify.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.mergify.yml b/.mergify.yml
index 2af387ed..6cd6a5a3 100644
--- a/.mergify.yml
+++ b/.mergify.yml
@@ -27,7 +27,7 @@ pull_request_rules:
- name: Trusted author and 1 approved review; trigger bors r+
conditions:
- - author~=^(mergify|kaiyou|muhlemmer|mildred|HorayNarea|adi90x|hoellen|ofthesun9|Nebukadneza|micw|lub|Diman0)$
+ - author~=^(mergify|kaiyou|muhlemmer|mildred|HorayNarea|hoellen|ofthesun9|Nebukadneza|micw|lub|Diman0|3-w-c|decentral1se|ghostwheel42|nextgens|parisni)$
- -title~=(WIP|wip)
- -label~=^(status/wip|status/blocked|review/need2)$
- "#approved-reviews-by>=1"
From fbd945390d32a3804ef8abd3b5c98e479890ddef Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 29 Jun 2021 16:13:04 +0200
Subject: [PATCH 122/181] cleaned imports and fixed datetime and passlib use
---
core/admin/mailu/models.py | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index 58e38de9..42711cf0 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -1,7 +1,6 @@
""" Mailu config storage model
"""
-import re
import os
import smtplib
import json
@@ -17,7 +16,6 @@ import passlib.hash
import passlib.registry
import time
import os
-import glob
import hmac
import smtplib
import idna
@@ -533,7 +531,7 @@ class User(Base, Email):
if cache_result and current_salt:
cache_salt, cache_hash = cache_result
if cache_salt == current_salt:
- return hash.pbkdf2_sha256.verify(password, cache_hash)
+ return passlib.hash.pbkdf2_sha256.verify(password, cache_hash)
else:
# the cache is local per gunicorn; the password has changed
# so the local cache can be invalidated
@@ -560,7 +558,7 @@ we have little control over GC and string interning anyways.
An attacker that can dump the process' memory is likely to find credentials
in clear-text regardless of the presence of the cache.
"""
- self._credential_cache[self.get_id()] = (self.password.split('$')[3], hash.pbkdf2_sha256.using(rounds=1).hash(password))
+ self._credential_cache[self.get_id()] = (self.password.split('$')[3], passlib.hash.pbkdf2_sha256.using(rounds=1).hash(password))
return result
def set_password(self, password, raw=False):
@@ -604,7 +602,7 @@ in clear-text regardless of the presence of the cache.
@classmethod
def get_temp_token(cls, email):
user = cls.query.get(email)
- return hmac.new(app.temp_token_key, bytearray("{}|{}".format(datetime.utcnow().strftime("%Y%m%d"), email), 'utf-8'), 'sha256').hexdigest() if (user and user.enabled) else None
+ return hmac.new(app.temp_token_key, bytearray("{}|{}".format(time.strftime('%Y%m%d'), email), 'utf-8'), 'sha256').hexdigest() if (user and user.enabled) else None
def verify_temp_token(self, token):
return hmac.compare_digest(self.get_temp_token(self.email), token)
From 14bdeb5e1e90d5368923290299485f538d3396ee Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 30 Jun 2021 12:36:11 +0200
Subject: [PATCH 123/181] Update version of roundcube webmail and carddav
plugin.
This is a security update.
- roundcube 1.4.11
- carddav 4.1.2
---
towncrier/UPDATE-ROUNDCUBE.feature | 1 +
webmails/roundcube/Dockerfile | 12 ++++--------
2 files changed, 5 insertions(+), 8 deletions(-)
create mode 100644 towncrier/UPDATE-ROUNDCUBE.feature
diff --git a/towncrier/UPDATE-ROUNDCUBE.feature b/towncrier/UPDATE-ROUNDCUBE.feature
new file mode 100644
index 00000000..85bd17fc
--- /dev/null
+++ b/towncrier/UPDATE-ROUNDCUBE.feature
@@ -0,0 +1 @@
+Update version of roundcube webmail and carddav plugin. This is a security update.
diff --git a/webmails/roundcube/Dockerfile b/webmails/roundcube/Dockerfile
index 0c8a1f42..fae02ce0 100644
--- a/webmails/roundcube/Dockerfile
+++ b/webmails/roundcube/Dockerfile
@@ -16,9 +16,9 @@ RUN apt-get update && apt-get install -y \
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, rainloop, roundcube
RUN pip3 install socrate
-ENV ROUNDCUBE_URL https://github.com/roundcube/roundcubemail/releases/download/1.4.6/roundcubemail-1.4.6-complete.tar.gz
+ENV ROUNDCUBE_URL https://github.com/roundcube/roundcubemail/releases/download/1.4.11/roundcubemail-1.4.11-complete.tar.gz
-ENV CARDDAV_URL https://github.com/blind-coder/rcmcarddav/releases/download/v3.0.3/carddav-3.0.3.tar.bz2
+ENV CARDDAV_URL https://github.com/mstilkerich/rcmcarddav/releases/download/v4.1.2/carddav-v4.1.2.tar.gz
RUN apt-get update && apt-get install -y \
zlib1g-dev libzip4 libzip-dev libpq-dev \
@@ -28,12 +28,8 @@ RUN apt-get update && apt-get install -y \
&& echo date.timezone=UTC > /usr/local/etc/php/conf.d/timezone.ini \
&& rm -rf /var/www/html/ \
&& cd /var/www \
- && curl -L -O ${ROUNDCUBE_URL} \
- && curl -L -O ${CARDDAV_URL} \
- && tar -xf *.tar.gz \
- && tar -xf *.tar.bz2 \
- && rm -f *.tar.gz \
- && rm -f *.tar.bz2 \
+ && curl -sL ${ROUNDCUBE_URL} | tar xz \
+ && curl -sL ${CARDDAV_URL} | tar xz \
&& mv roundcubemail-* html \
&& mv carddav html/plugins/ \
&& cd html \
From 6740c77e4383b8bfb9713ec1b34280af0feb6cbc Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Fri, 2 Jul 2021 18:44:21 +0200
Subject: [PATCH 124/181] small bugfix for exception
---
core/admin/mailu/schemas.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 2742edf1..191d01ac 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -590,7 +590,7 @@ class DkimKeyField(fields.String):
value = value[:pos]
else:
footer = '-----END PRIVATE KEY-----'
- except ValueError:
+ except ValueError as exc:
raise ValidationError(f'invalid dkim key {bad_key!r}') from exc
# remove whitespace from key data
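The one-character change matters because `raise ... from exc` needs the caught exception bound to a name; without `as exc` the re-raise itself fails with a NameError. A minimal illustration, where ValidationError stands in for the marshmallow class used by the schema:

    class ValidationError(Exception):
        """Stand-in for marshmallow.ValidationError, for illustration only."""

    def parse_rounds(value):
        try:
            return int(value)
        except ValueError as exc:
            # binding the exception lets us chain it and preserve the original traceback
            raise ValidationError(f'invalid value {value!r}') from exc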
From 2045ae2e10c37074966996c842bef4fa93df4538 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Fri, 2 Jul 2021 22:47:51 +0200
Subject: [PATCH 125/181] updated changelog file
---
towncrier/{UPDATE-ROUNDCUBE.feature => 1841.feature} | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename towncrier/{UPDATE-ROUNDCUBE.feature => 1841.feature} (100%)
diff --git a/towncrier/UPDATE-ROUNDCUBE.feature b/towncrier/1841.feature
similarity index 100%
rename from towncrier/UPDATE-ROUNDCUBE.feature
rename to towncrier/1841.feature
From 92896ae646eb007ac8286eedf39de9cc15d1c65f Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sat, 3 Jul 2021 11:40:32 +0200
Subject: [PATCH 126/181] fix bugs in model and schema introduced by #1604
---
core/admin/mailu/models.py | 8 ++++----
core/admin/mailu/schemas.py | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index 42711cf0..b5ba29c0 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -628,10 +628,10 @@ class Alias(Base, Email):
sqlalchemy.and_(cls.domain_name == domain_name,
sqlalchemy.or_(
sqlalchemy.and_(
- cls.wildcard is False,
+ cls.wildcard == False,
cls.localpart == localpart
), sqlalchemy.and_(
- cls.wildcard is True,
+ cls.wildcard == True,
sqlalchemy.bindparam('l', localpart).like(cls.localpart)
)
)
@@ -643,10 +643,10 @@ class Alias(Base, Email):
sqlalchemy.and_(cls.domain_name == domain_name,
sqlalchemy.or_(
sqlalchemy.and_(
- cls.wildcard is False,
+ cls.wildcard == False,
sqlalchemy.func.lower(cls.localpart) == localpart_lower
), sqlalchemy.and_(
- cls.wildcard is True,
+ cls.wildcard == True,
sqlalchemy.bindparam('l', localpart_lower).like(
sqlalchemy.func.lower(cls.localpart))
)
diff --git a/core/admin/mailu/schemas.py b/core/admin/mailu/schemas.py
index 2742edf1..191d01ac 100644
--- a/core/admin/mailu/schemas.py
+++ b/core/admin/mailu/schemas.py
@@ -590,7 +590,7 @@ class DkimKeyField(fields.String):
value = value[:pos]
else:
footer = '-----END PRIVATE KEY-----'
- except ValueError:
+ except ValueError as exc:
raise ValidationError(f'invalid dkim key {bad_key!r}') from exc
# remove whitespace from key data
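The wildcard fix is needed because `is` compares Python object identity and cannot be overloaded, so `cls.wildcard is False` always evaluates to a plain `False` and those filter branches match nothing; `==` is overloaded by SQLAlchemy into a SQL comparison. A small sketch with a throwaway model (not Mailu's actual Alias definition):

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Alias(Base):
        __tablename__ = 'alias'
        localpart = sa.Column(sa.String, primary_key=True)
        wildcard = sa.Column(sa.Boolean, nullable=False, default=False)

    print(Alias.wildcard is False)   # False -- a plain Python bool, useless in a query
    print(Alias.wildcard == False)   # a BinaryExpression, compiled to SQL such as "alias.wildcard = false"
    print(sa.and_(Alias.wildcard == True, Alias.localpart == 'admin'))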
From d75c8469d3dbef8776eccf31538d2ee64fa733f2 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sat, 3 Jul 2021 15:10:34 +0200
Subject: [PATCH 127/181] Update rainloop to 1.16.0
---
towncrier/1845.feature | 1 +
webmails/rainloop/Dockerfile | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
create mode 100644 towncrier/1845.feature
diff --git a/towncrier/1845.feature b/towncrier/1845.feature
new file mode 100644
index 00000000..afde9313
--- /dev/null
+++ b/towncrier/1845.feature
@@ -0,0 +1 @@
+Update version of rainloop webmail to 1.16.0. This is a security update.
diff --git a/webmails/rainloop/Dockerfile b/webmails/rainloop/Dockerfile
index 9987330e..9c65f277 100644
--- a/webmails/rainloop/Dockerfile
+++ b/webmails/rainloop/Dockerfile
@@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y \
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, rainloop, roundcube
RUN pip3 install socrate
-ENV RAINLOOP_URL https://github.com/RainLoop/rainloop-webmail/releases/download/v1.14.0/rainloop-community-1.14.0.zip
+ENV RAINLOOP_URL https://github.com/RainLoop/rainloop-webmail/releases/download/v1.16.0/rainloop-community-1.16.0.zip
RUN apt-get update && apt-get install -y \
unzip python3-jinja2 \
From 87fe34e0a3e43029f69aac695676ba38de81c502 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sat, 3 Jul 2021 19:35:44 +0200
Subject: [PATCH 128/181] fix newsfragment of #1841
---
towncrier/{ => newsfragments}/1841.feature | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
rename towncrier/{ => newsfragments}/1841.feature (77%)
diff --git a/towncrier/1841.feature b/towncrier/newsfragments/1841.feature
similarity index 77%
rename from towncrier/1841.feature
rename to towncrier/newsfragments/1841.feature
index 85bd17fc..c91f805f 100644
--- a/towncrier/1841.feature
+++ b/towncrier/newsfragments/1841.feature
@@ -1 +1 @@
-Update version of roundcube webmail and carddav plugin. This is a security update.
+Update version of roundcube webmail and carddav plugin. This is a security update.
\ No newline at end of file
From 8b71a92219bd9e477eaa13536ff298a989858dcd Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sat, 3 Jul 2021 22:32:47 +0200
Subject: [PATCH 129/181] use fixed msg for key derivation
---
core/admin/mailu/utils.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/admin/mailu/utils.py b/core/admin/mailu/utils.py
index c7e1f73c..02150754 100644
--- a/core/admin/mailu/utils.py
+++ b/core/admin/mailu/utils.py
@@ -280,7 +280,7 @@ class MailuSessionConfig:
key = want_bytes(app.secret_key)
- self._hmac = hmac.new(hmac.digest(key, key, digest='sha256'), digestmod='sha256')
+ self._hmac = hmac.new(hmac.digest(key, b'SESSION_UID_HASH', digest='sha256'), digestmod='sha256')
self._uid_len = uid_bytes
self._uid_b64 = len(self._encode(bytes(uid_bytes)))
self._sid_len = sid_bytes
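Keying the session-UID HMAC with a sub-key derived from the app secret and a fixed label is a standard domain-separation pattern: the same master secret can then safely serve several purposes. A minimal sketch with the standard library:

    import hmac

    def derive_subkey(secret: bytes, label: bytes) -> bytes:
        """Derive a purpose-specific key from a master secret (domain separation)."""
        return hmac.digest(secret, label, digest='sha256')

    master = b'application-secret-key'           # stands in for app.secret_key
    uid_key = derive_subkey(master, b'SESSION_UID_HASH')
    mac = hmac.new(uid_key, digestmod='sha256')  # the per-session hasher built in the patch
    mac.update(b'some-user-id')
    print(mac.hexdigest())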
From 11917a5011704a69bdccb8382a11cd969d3e76e6 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sun, 4 Jul 2021 19:18:53 +0200
Subject: [PATCH 130/181] mend
---
towncrier/{ => newsfragments}/1845.feature | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename towncrier/{ => newsfragments}/1845.feature (100%)
diff --git a/towncrier/1845.feature b/towncrier/newsfragments/1845.feature
similarity index 100%
rename from towncrier/1845.feature
rename to towncrier/newsfragments/1845.feature
From 420afa53f88988b1bd533deb2eed81c8b25e2ae1 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 5 Jul 2021 15:50:49 +0200
Subject: [PATCH 131/181] Upgrade to alpine 3.14
---
core/admin/Dockerfile | 2 +-
core/dovecot/Dockerfile | 2 +-
core/nginx/Dockerfile | 2 +-
core/none/Dockerfile | 2 +-
core/postfix/Dockerfile | 2 +-
core/rspamd/Dockerfile | 2 +-
docs/Dockerfile | 2 +-
optional/clamav/Dockerfile | 2 +-
optional/fetchmail/Dockerfile | 2 +-
optional/postgresql/Dockerfile | 2 +-
optional/radicale/Dockerfile | 2 +-
optional/unbound/Dockerfile | 2 +-
setup/Dockerfile | 2 +-
13 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/core/admin/Dockerfile b/core/admin/Dockerfile
index f3b8643c..3153bd9e 100644
--- a/core/admin/Dockerfile
+++ b/core/admin/Dockerfile
@@ -1,5 +1,5 @@
# First stage to build assets
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
ARG ARCH=""
FROM ${ARCH}node:8 as assets
COPY --from=balenalib/rpi-alpine:3.10 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
diff --git a/core/dovecot/Dockerfile b/core/dovecot/Dockerfile
index e1c20eff..22145bde 100644
--- a/core/dovecot/Dockerfile
+++ b/core/dovecot/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.13
+ARG DISTRO=alpine:3.14
FROM $DISTRO as builder
WORKDIR /tmp
RUN apk add git build-base automake autoconf libtool dovecot-dev xapian-core-dev icu-dev
diff --git a/core/nginx/Dockerfile b/core/nginx/Dockerfile
index 2bc1cfd1..1906ed31 100644
--- a/core/nginx/Dockerfile
+++ b/core/nginx/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
diff --git a/core/none/Dockerfile b/core/none/Dockerfile
index 70041dac..51b8d1c5 100644
--- a/core/none/Dockerfile
+++ b/core/none/Dockerfile
@@ -1,6 +1,6 @@
# This is an idle image to dynamically replace any component if disabled.
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
CMD sleep 1000000d
diff --git a/core/postfix/Dockerfile b/core/postfix/Dockerfile
index af29bf91..939feb9c 100644
--- a/core/postfix/Dockerfile
+++ b/core/postfix/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
diff --git a/core/rspamd/Dockerfile b/core/rspamd/Dockerfile
index acaf074e..6706ef14 100644
--- a/core/rspamd/Dockerfile
+++ b/core/rspamd/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 70c9c3c4..11f66b49 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.8
+ARG DISTRO=alpine:3.14
FROM $DISTRO
COPY requirements.txt /requirements.txt
diff --git a/optional/clamav/Dockerfile b/optional/clamav/Dockerfile
index 1132845f..20cebcdc 100644
--- a/optional/clamav/Dockerfile
+++ b/optional/clamav/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
diff --git a/optional/fetchmail/Dockerfile b/optional/fetchmail/Dockerfile
index a707a54a..506e409a 100644
--- a/optional/fetchmail/Dockerfile
+++ b/optional/fetchmail/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
diff --git a/optional/postgresql/Dockerfile b/optional/postgresql/Dockerfile
index ff25a66f..9c6558b9 100644
--- a/optional/postgresql/Dockerfile
+++ b/optional/postgresql/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
diff --git a/optional/radicale/Dockerfile b/optional/radicale/Dockerfile
index 400b1a3f..13761164 100644
--- a/optional/radicale/Dockerfile
+++ b/optional/radicale/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
diff --git a/optional/unbound/Dockerfile b/optional/unbound/Dockerfile
index abb45420..2b472d44 100644
--- a/optional/unbound/Dockerfile
+++ b/optional/unbound/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.12
+ARG DISTRO=alpine:3.14
FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
diff --git a/setup/Dockerfile b/setup/Dockerfile
index 2b3c3c6c..5775ab6b 100644
--- a/setup/Dockerfile
+++ b/setup/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.10
+ARG DISTRO=alpine:3.14
FROM $DISTRO
RUN mkdir -p /app
From 0211c06c37fc466291877d1bf064d252f8b5bb7f Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 5 Jul 2021 15:54:04 +0200
Subject: [PATCH 132/181] don't need sudo here
---
optional/postgresql/Dockerfile | 1 -
1 file changed, 1 deletion(-)
diff --git a/optional/postgresql/Dockerfile b/optional/postgresql/Dockerfile
index 9c6558b9..0f5034da 100644
--- a/optional/postgresql/Dockerfile
+++ b/optional/postgresql/Dockerfile
@@ -3,7 +3,6 @@ FROM $DISTRO
# python3 shared with most images
RUN apk add --no-cache \
python3 py3-pip bash py3-multidict \
- && apk add --upgrade sudo \
&& pip3 install --upgrade pip
# Shared layer between nginx, dovecot, postfix, postgresql, rspamd, unbound, rainloop, roundcube
From 72735ab320cd2b0b9164fefcd16620ba2032cb2c Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 5 Jul 2021 17:08:05 +0200
Subject: [PATCH 133/181] remove cyrus-sasl-plain
---
core/postfix/Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/postfix/Dockerfile b/core/postfix/Dockerfile
index 939feb9c..062155c1 100644
--- a/core/postfix/Dockerfile
+++ b/core/postfix/Dockerfile
@@ -12,7 +12,7 @@ RUN pip3 install socrate==0.2.0
RUN pip3 install "podop>0.2.5"
# Image specific layers under this line
-RUN apk add --no-cache postfix postfix-pcre cyrus-sasl-plain cyrus-sasl-login
+RUN apk add --no-cache postfix postfix-pcre cyrus-sasl-login
COPY conf /conf
COPY start.py /start.py
From 474e5aa5278ee2f924e820867e0febf5636ba0c3 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Mon, 5 Jul 2021 17:11:09 +0200
Subject: [PATCH 134/181] document
---
towncrier/newsfragments/1851.feature | 1 +
1 file changed, 1 insertion(+)
create mode 100644 towncrier/newsfragments/1851.feature
diff --git a/towncrier/newsfragments/1851.feature b/towncrier/newsfragments/1851.feature
new file mode 100644
index 00000000..e01f5cb4
--- /dev/null
+++ b/towncrier/newsfragments/1851.feature
@@ -0,0 +1 @@
+Remove cyrus-sasl-plain as it's not packaged by alpine anymore. SASL-login is still available and used when relaying.
From 7aa9b496fabbac77219227a2f33baf47a5435b67 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Mon, 5 Jul 2021 17:09:48 +0000
Subject: [PATCH 135/181] Moved logic for building testing images from deploy
job to build job. This should save time. Test number for ci workflow #1234
---
.github/workflows/CI.yml | 34 ++++++++++++++++++++++++----------
tests/deploy.sh | 9 ---------
2 files changed, 24 insertions(+), 19 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 2b81a60d..2db8de97 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -13,7 +13,7 @@ on:
- '[1-9].[0-9].[0-9]'
# pre-releases, e.g. 1.8-pre1
- 1.8-pre[0-9]
- # test branches, e.g. test-debian
+ # test branches, e.g. test-debian
- test-*
###############################################
@@ -39,6 +39,21 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ #For branch TESTING, we set the image tag to PR-xxxx
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
@@ -58,7 +73,7 @@ jobs:
run: echo "$DOCKER_PW" | docker login --username $DOCKER_UN --password-stdin
- name: Build all docker images
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
run: docker-compose -f tests/build.yml build
@@ -94,7 +109,7 @@ jobs:
- name: Test core suite
run: python tests/compose/test.py core 1
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
@@ -127,7 +142,7 @@ jobs:
- name: Test fetch
run: python tests/compose/test.py fetchmail 1
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
@@ -160,7 +175,7 @@ jobs:
- name: Test clamvav
run: python tests/compose/test.py filters 2
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
@@ -193,7 +208,7 @@ jobs:
- name: Test rainloop
run: python tests/compose/test.py rainloop 1
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
@@ -226,7 +241,7 @@ jobs:
- name: Test roundcube
run: python tests/compose/test.py roundcube 1
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
@@ -259,7 +274,7 @@ jobs:
- name: Test webdav
run: python tests/compose/test.py webdav 1
env:
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
@@ -300,9 +315,8 @@ jobs:
DOCKER_PW: ${{ secrets.Docker_Password }}
DOCKER_ORG: ${{ secrets.DOCKER_ORG }}
DOCKER_ORG_TESTS: ${{ secrets.DOCKER_ORG_TESTS }}
- MAILU_VERSION: ${{ env.BRANCH }}
+ MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
- TRAVIS_COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
run: bash tests/deploy.sh
# This job is watched by bors. It only completes if building, testing and deploy worked.
diff --git a/tests/deploy.sh b/tests/deploy.sh
index a836417b..abb37b6b 100755
--- a/tests/deploy.sh
+++ b/tests/deploy.sh
@@ -3,14 +3,5 @@
# Skip deploy for staging branch
[ "$TRAVIS_BRANCH" = "staging" ] && exit 0
-# Retag in case of `bors try`
-if [ "$TRAVIS_BRANCH" = "testing" ]; then
- export DOCKER_ORG=$DOCKER_ORG_TESTS
- # Commit message is like "Try #99".
- # This sets the version tag to "pr-99"
- export MAILU_VERSION="pr-${TRAVIS_COMMIT_MESSAGE//[!0-9]/}"
- docker-compose -f tests/build.yml build
-fi
-
docker login -u $DOCKER_UN -p $DOCKER_PW
docker-compose -f tests/build.yml push
From 58e751415c5bdaf161792d6ce9f6e6334616c8c9 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Mon, 5 Jul 2021 17:21:28 +0000
Subject: [PATCH 136/181] Yet another small typo. One day computers will
understand I meant env.MAILU_BRANCH instead of MAILU_BRANCH.
---
.github/workflows/CI.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 2db8de97..c495b3ec 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -53,7 +53,7 @@ jobs:
env:
MAILU_BRANCH: ${{ env.BRANCH }}
run: |
- echo "MAILU_VERSION=${{ MAILU_BRANCH }}" >> $GITHUB_ENV
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
From c3f47f1ca0bc033bd309a54696bd7674bc4013bc Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Mon, 5 Jul 2021 17:44:10 +0000
Subject: [PATCH 137/181] Forgot that env var set in job 1 is not shared with
job 2. Added logic for deriving MAILU_VERSION to deploy job.
---
.github/workflows/CI.yml | 15 +++++++++++++++
1 file changed, 15 insertions(+)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index c495b3ec..55595838 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -295,6 +295,21 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ #For branch TESTING, we set the image tag to PR-xxxx
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
From 9790dcdabe33ce7579583ab35796e8027f090bad Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 5 Jul 2021 23:04:07 +0200
Subject: [PATCH 138/181] updated dependencies
---
core/admin/requirements-prod.txt | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/core/admin/requirements-prod.txt b/core/admin/requirements-prod.txt
index c4663557..88ff2981 100644
--- a/core/admin/requirements-prod.txt
+++ b/core/admin/requirements-prod.txt
@@ -5,7 +5,7 @@ bcrypt==3.1.6
blinker==1.4
cffi==1.12.3
Click==7.0
-cryptography==3.2
+cryptography==3.4.7
decorator==4.4.0
dnspython==1.16.0
dominate==2.3.5
@@ -25,7 +25,7 @@ idna==2.8
infinity==1.4
intervals==0.8.1
itsdangerous==1.1.0
-Jinja2==2.10.1
+Jinja2==2.11.3
limits==1.3
Mako==1.0.9
MarkupSafe==1.1.1
@@ -36,11 +36,11 @@ passlib==1.7.4
psycopg2==2.8.2
pycparser==2.19
Pygments==2.8.1
-pyOpenSSL==19.0.0
+pyOpenSSL==20.0.1
python-dateutil==2.8.0
python-editor==1.0.4
pytz==2019.1
-PyYAML==5.1
+PyYAML==5.4.1
redis==3.2.1
#alpine3:12 provides six==1.15.0
#six==1.12.0
From 3bb0d68ead2271ab3714b773b9aba2b98ab78666 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Mon, 5 Jul 2021 23:27:42 +0200
Subject: [PATCH 139/181] add cargo to build cryptography
---
core/admin/Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/admin/Dockerfile b/core/admin/Dockerfile
index f3b8643c..6ea12311 100644
--- a/core/admin/Dockerfile
+++ b/core/admin/Dockerfile
@@ -26,7 +26,7 @@ WORKDIR /app
COPY requirements-prod.txt requirements.txt
RUN apk add --no-cache libressl curl postgresql-libs mariadb-connector-c \
&& apk add --no-cache --virtual build-dep \
- libressl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev \
+ libressl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev cargo \
&& pip3 install -r requirements.txt \
&& apk del --no-cache build-dep
From 858312a5cb35fa97fe4108c816b61ead9972f50a Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 6 Jul 2021 18:01:38 +0200
Subject: [PATCH 140/181] remove explicit jQuery dependency
---
core/admin/package.json | 1 -
1 file changed, 1 deletion(-)
diff --git a/core/admin/package.json b/core/admin/package.json
index a1107c69..931087e7 100644
--- a/core/admin/package.json
+++ b/core/admin/package.json
@@ -19,7 +19,6 @@
"file-loader": "^3.0.1",
"font-awesome": "^4.7.0",
"font-awesome-loader": "^1.0.2",
- "jQuery": "^1.7.4",
"less": "^3.9.0",
"less-loader": "^5.0.0",
"mini-css-extract-plugin": "^0.6.0",
From 1bb059f4c103fb34000ca47d2eab4ffdb35c0226 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 6 Jul 2021 19:36:28 +0200
Subject: [PATCH 141/181] switched to newest possible versions for nodejs v8
---
core/admin/package.json | 33 ++++++++++++++-------------------
1 file changed, 14 insertions(+), 19 deletions(-)
diff --git a/core/admin/package.json b/core/admin/package.json
index 931087e7..384b4466 100644
--- a/core/admin/package.json
+++ b/core/admin/package.json
@@ -2,33 +2,28 @@
"name": "mailu",
"version": "1.0.0",
"description": "Mailu admin assets",
- "main": "assest/index.js",
+ "main": "assets/index.js",
+ "directories": {
+ "lib": "lib"
+ },
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"dependencies": {
- "@babel/core": "^7.4.4",
- "@babel/preset-env": "^7.4.4",
- "admin-lte": "^2.4.10",
- "babel-loader": "^8.0.5",
- "bootstrap": "^3.4.1",
+ "@babel/core": "^7.14.6",
+ "admin-lte": "^2.4.18",
+ "babel-loader": "^8.0.6",
"css-loader": "^2.1.1",
"expose-loader": "^0.7.5",
- "file-loader": "^3.0.1",
- "font-awesome": "^4.7.0",
- "font-awesome-loader": "^1.0.2",
- "less": "^3.9.0",
+ "less": "^4.1.1",
"less-loader": "^5.0.0",
- "mini-css-extract-plugin": "^0.6.0",
- "node-sass": "^4.12.0",
- "popper.js": "^1.15.0",
- "sass-loader": "^7.1.0",
- "select2": "^4.0.7-rc.0",
- "style-loader": "^0.23.1",
- "url-loader": "^1.1.2",
- "webpack": "^4.30.0",
- "webpack-cli": "^3.3.2"
+ "mini-css-extract-plugin": "^1.2.1",
+ "node-sass": "^4.13.1",
+ "sass-loader": "^7.3.1",
+ "url-loader": "^2.3.0",
+ "webpack": "^4.33.0",
+ "webpack-cli": "^3.3.12"
}
}
From 3c8a8aa8f0f0b0f2518afa83ff7efa2e8e1af5d4 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Tue, 6 Jul 2021 19:47:13 +0200
Subject: [PATCH 142/181] use less v3 to make less-loader happy
---
core/admin/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/admin/package.json b/core/admin/package.json
index 384b4466..a6eae07c 100644
--- a/core/admin/package.json
+++ b/core/admin/package.json
@@ -17,7 +17,7 @@
"babel-loader": "^8.0.6",
"css-loader": "^2.1.1",
"expose-loader": "^0.7.5",
- "less": "^4.1.1",
+ "less": "^3.13.1",
"less-loader": "^5.0.0",
"mini-css-extract-plugin": "^1.2.1",
"node-sass": "^4.13.1",
From 6377ccb2cb7d764b54447bb806db094797b935f5 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 7 Jul 2021 10:30:07 +0200
Subject: [PATCH 143/181] re-add jquery and select2 used in app.js
---
core/admin/package.json | 2 ++
1 file changed, 2 insertions(+)
diff --git a/core/admin/package.json b/core/admin/package.json
index a6eae07c..3435d881 100644
--- a/core/admin/package.json
+++ b/core/admin/package.json
@@ -17,11 +17,13 @@
"babel-loader": "^8.0.6",
"css-loader": "^2.1.1",
"expose-loader": "^0.7.5",
+ "jquery": "^3.6.0",
"less": "^3.13.1",
"less-loader": "^5.0.0",
"mini-css-extract-plugin": "^1.2.1",
"node-sass": "^4.13.1",
"sass-loader": "^7.3.1",
+ "select2": "^4.0.13",
"url-loader": "^2.3.0",
"webpack": "^4.33.0",
"webpack-cli": "^3.3.12"
From 56cfcf8b64850cf5cc415af3f982449287d880d3 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 7 Jul 2021 10:32:59 +0200
Subject: [PATCH 144/181] converted tabs to spaces
---
core/admin/package.json | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/admin/package.json b/core/admin/package.json
index 3435d881..77f39986 100644
--- a/core/admin/package.json
+++ b/core/admin/package.json
@@ -17,13 +17,13 @@
"babel-loader": "^8.0.6",
"css-loader": "^2.1.1",
"expose-loader": "^0.7.5",
- "jquery": "^3.6.0",
+ "jquery": "^3.6.0",
"less": "^3.13.1",
"less-loader": "^5.0.0",
"mini-css-extract-plugin": "^1.2.1",
"node-sass": "^4.13.1",
"sass-loader": "^7.3.1",
- "select2": "^4.0.13",
+ "select2": "^4.0.13",
"url-loader": "^2.3.0",
"webpack": "^4.33.0",
"webpack-cli": "^3.3.12"
From 180026bd77e91de87116faad33b059ed61d80d3c Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Wed, 7 Jul 2021 11:33:48 +0200
Subject: [PATCH 145/181] also disable startdate
---
core/admin/mailu/ui/templates/user/reply.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/core/admin/mailu/ui/templates/user/reply.html b/core/admin/mailu/ui/templates/user/reply.html
index 7225a178..1a161188 100644
--- a/core/admin/mailu/ui/templates/user/reply.html
+++ b/core/admin/mailu/ui/templates/user/reply.html
@@ -14,7 +14,7 @@
{{ form.hidden_tag() }}
{{ macros.form_field(form.reply_enabled,
onchange="if(this.checked){$('#reply_subject,#reply_body,#reply_enddate,#reply_startdate').removeAttr('readonly')}
- else{$('#reply_subject,#reply_body,#reply_enddate').attr('readonly', '')}") }}
+ else{$('#reply_subject,#reply_body,#reply_enddate,#reply_startdate').attr('readonly', '')}") }}
{{ macros.form_field(form.reply_subject,
**{("rw" if user.reply_enabled else "readonly"): ""}) }}
{{ macros.form_field(form.reply_body, rows=10,
From f80e04a8c559f2f8ce21668d8b27794d37adb6ad Mon Sep 17 00:00:00 2001
From: networkException
Date: Thu, 8 Jul 2021 21:58:29 +0200
Subject: [PATCH 146/181] Docs: Replace hardcoded journald logpath with systemd
backend
The file at /var/log/messages is not universal for every
distribution. Fail2ban can access journald logs directly
by using the systemd backend.
---
docs/faq.rst | 4 ++--
towncrier/newsfragments/1857.doc | 1 +
2 files changed, 3 insertions(+), 2 deletions(-)
create mode 100644 towncrier/newsfragments/1857.doc
diff --git a/docs/faq.rst b/docs/faq.rst
index 403fd163..2fe2a7ec 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -513,8 +513,8 @@ follow these steps:
[bad-auth]
enabled = true
+ backend = systemd
filter = bad-auth
- logpath = /var/log/messages
bantime = 604800
findtime = 300
maxretry = 10
@@ -565,7 +565,7 @@ Restart the Fail2Ban service.
sudo systemctl restart fail2ban
-*Issue reference:* `85`_, `116`_, `171`_, `584`_, `592`_, `1727`_.
+*Issue reference:* `85`_, `116`_, `171`_, `584`_, `592`_, `1727`_, `1857`_.
Users can't change their password from webmail
``````````````````````````````````````````````
diff --git a/towncrier/newsfragments/1857.doc b/towncrier/newsfragments/1857.doc
new file mode 100644
index 00000000..06cb91ab
--- /dev/null
+++ b/towncrier/newsfragments/1857.doc
@@ -0,0 +1 @@
+Update fail2ban documentation to use systemd backend instead of filepath for journald
\ No newline at end of file
From a2cf13c548b9fa428d11e84cbce030d6a42dedd5 Mon Sep 17 00:00:00 2001
From: networkException
Date: Thu, 8 Jul 2021 22:11:54 +0200
Subject: [PATCH 147/181] Template: Update link to changelog entry
documentation for pull requests
---
PULL_REQUEST_TEMPLATE.md | 2 +-
docs/faq.rst | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md
index 059318fc..8fb3265d 100644
--- a/PULL_REQUEST_TEMPLATE.md
+++ b/PULL_REQUEST_TEMPLATE.md
@@ -13,4 +13,4 @@ Before we can consider review and merge, please make sure the following list is
If an entry is not applicable, you can check it or remove it from the list.
- [ ] In case of feature or enhancement: documentation updated accordingly
-- [ ] Unless it's docs or a minor change: add [changelog](https://mailu.io/master/contributors/guide.html#changelog) entry file.
+- [ ] Unless it's docs or a minor change: add [changelog](https://mailu.io/master/contributors/workflow.html#changelog) entry file.
diff --git a/docs/faq.rst b/docs/faq.rst
index 2fe2a7ec..5d975532 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -565,7 +565,7 @@ Restart the Fail2Ban service.
sudo systemctl restart fail2ban
-*Issue reference:* `85`_, `116`_, `171`_, `584`_, `592`_, `1727`_, `1857`_.
+*Issue reference:* `85`_, `116`_, `171`_, `584`_, `592`_, `1727`_.
Users can't change their password from webmail
``````````````````````````````````````````````
From a0dcd46483bb9de591391b8c4c33846c96fd3975 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Wed, 14 Jul 2021 09:25:04 +0200
Subject: [PATCH 148/181] fix #1861: Handle colons in passwords
---
core/admin/mailu/internal/views/auth.py | 2 +-
towncrier/newsfragments/1861.bugfix | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
create mode 100644 towncrier/newsfragments/1861.bugfix
diff --git a/core/admin/mailu/internal/views/auth.py b/core/admin/mailu/internal/views/auth.py
index 8ff10aed..2baeddce 100644
--- a/core/admin/mailu/internal/views/auth.py
+++ b/core/admin/mailu/internal/views/auth.py
@@ -63,7 +63,7 @@ def basic_authentication():
authorization = flask.request.headers.get("Authorization")
if authorization and authorization.startswith("Basic "):
encoded = authorization.replace("Basic ", "")
- user_email, password = base64.b64decode(encoded).split(b":")
+ user_email, password = base64.b64decode(encoded).split(b":", 1)
user = models.User.query.get(user_email.decode("utf8"))
if nginx.check_credentials(user, password.decode('utf-8'), flask.request.remote_addr, "web"):
response = flask.Response()
diff --git a/towncrier/newsfragments/1861.bugfix b/towncrier/newsfragments/1861.bugfix
new file mode 100644
index 00000000..1e28d1b6
--- /dev/null
+++ b/towncrier/newsfragments/1861.bugfix
@@ -0,0 +1 @@
+Fix a bug preventing colons from being used in passwords when using radicale/webdav.
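Basic credentials are base64("user:password") and only the first colon separates the two fields, so passwords containing colons were previously truncated. A short sketch of the fixed parsing, using a made-up address and password:

    import base64

    header = "Basic " + base64.b64encode(b"user@example.com:pa:ss:word").decode()
    encoded = header.replace("Basic ", "")
    user_email, password = base64.b64decode(encoded).split(b":", 1)  # maxsplit=1 keeps colons in the password
    print(user_email)   # b'user@example.com'
    print(password)     # b'pa:ss:word'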
From 8bc1d6c08bf120d5e5bc6a40180b6750967a22aa Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sun, 18 Jul 2021 18:24:46 +0200
Subject: [PATCH 149/181] Replace PUBLIC_HOSTNAME/IP in Received headers
This will ensure that we don't get spam points for not respecting the
RFC
---
core/postfix/conf/outclean_header_filter.cf | 2 +-
core/postfix/start.py | 2 ++
towncrier/newsfragments/191.bugfix | 1 +
3 files changed, 4 insertions(+), 1 deletion(-)
create mode 100644 towncrier/newsfragments/191.bugfix
diff --git a/core/postfix/conf/outclean_header_filter.cf b/core/postfix/conf/outclean_header_filter.cf
index 03e33ee9..7e0e92d3 100644
--- a/core/postfix/conf/outclean_header_filter.cf
+++ b/core/postfix/conf/outclean_header_filter.cf
@@ -4,7 +4,7 @@
# Remove the first line of the Received: header. Note that we cannot fully remove the Received: header
# because OpenDKIM requires that a header be present when signing outbound mail. The first line is
# where the user's home IP address would be.
-/^\s*Received:[^\n]*(.*)/ REPLACE Received: from authenticated-user (PRIMARY_HOSTNAME [PUBLIC_IP])$1
+/^\s*Received:[^\n]*(.*)/ REPLACE Received: from authenticated-user ({{OUTCLEAN}} [{{OUTCLEAN_ADDRESS}}])$1
# Remove other typically private information.
/^\s*User-Agent:/ IGNORE
diff --git a/core/postfix/start.py b/core/postfix/start.py
index b68303e1..1703d45e 100755
--- a/core/postfix/start.py
+++ b/core/postfix/start.py
@@ -36,6 +36,8 @@ os.environ["FRONT_ADDRESS"] = system.get_host_address_from_environment("FRONT",
os.environ["ADMIN_ADDRESS"] = system.get_host_address_from_environment("ADMIN", "admin")
os.environ["ANTISPAM_MILTER_ADDRESS"] = system.get_host_address_from_environment("ANTISPAM_MILTER", "antispam:11332")
os.environ["LMTP_ADDRESS"] = system.get_host_address_from_environment("LMTP", "imap:2525")
+os.environ["OUTCLEAN"] = os.environ["HOSTNAMES"].split(",")[0]
+os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(os.environ["OUTCLEAN"])
for postfix_file in glob.glob("/conf/*.cf"):
conf.jinja(postfix_file, os.environ, os.path.join("/etc/postfix", os.path.basename(postfix_file)))
diff --git a/towncrier/newsfragments/191.bugfix b/towncrier/newsfragments/191.bugfix
new file mode 100644
index 00000000..185d3074
--- /dev/null
+++ b/towncrier/newsfragments/191.bugfix
@@ -0,0 +1 @@
+Replace PUBLIC_HOSTNAME and PUBLIC_IP in "Received" headers to ensure that no undue spam points are attributed
From 1d65529c94f54de3cb49ed9584ed95f7860c26fa Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sun, 18 Jul 2021 18:43:20 +0200
Subject: [PATCH 150/181] The lookup could fail; ensure we set something
---
core/postfix/start.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/core/postfix/start.py b/core/postfix/start.py
index 1703d45e..701efec3 100755
--- a/core/postfix/start.py
+++ b/core/postfix/start.py
@@ -37,7 +37,10 @@ os.environ["ADMIN_ADDRESS"] = system.get_host_address_from_environment("ADMIN",
os.environ["ANTISPAM_MILTER_ADDRESS"] = system.get_host_address_from_environment("ANTISPAM_MILTER", "antispam:11332")
os.environ["LMTP_ADDRESS"] = system.get_host_address_from_environment("LMTP", "imap:2525")
os.environ["OUTCLEAN"] = os.environ["HOSTNAMES"].split(",")[0]
-os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(os.environ["OUTCLEAN"])
+try:
+ os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(os.environ["OUTCLEAN"])
+except:
+ os.environ["OUTCLEAN_ADDRESS"] = "10.10.10.10"
for postfix_file in glob.glob("/conf/*.cf"):
conf.jinja(postfix_file, os.environ, os.path.join("/etc/postfix", os.path.basename(postfix_file)))
From 06019452e3246111c94092ef197602eb0c883be8 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Tue, 20 Jul 2021 11:22:02 +0000
Subject: [PATCH 151/181] Remove dot in blueprint name to prevent critical
flask initialisation error.
---
setup/server.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/setup/server.py b/setup/server.py
index 0d58fa25..5be1fc83 100644
--- a/setup/server.py
+++ b/setup/server.py
@@ -54,11 +54,11 @@ def build_app(path):
@app.context_processor
def app_context():
return dict(
- versions=os.getenv("VERSIONS","master").split(','),
+ versions=os.getenv("VERSIONS","master").split(','),
stable_version = os.getenv("stable_version", "master")
)
- prefix_bp = flask.Blueprint(version, __name__)
+ prefix_bp = flask.Blueprint(version.replace(".", "_"), __name__)
prefix_bp.jinja_loader = jinja2.ChoiceLoader([
jinja2.FileSystemLoader(os.path.join(path, "templates")),
jinja2.FileSystemLoader(os.path.join(path, "flavors"))
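Recent Flask releases reject blueprint names containing dots, because the dot is the namespace separator for endpoints; replacing it keeps one blueprint per setup version. A minimal illustration (the version string is a placeholder):

    import flask

    version = "1.8"
    # flask.Blueprint(version, __name__) raises ValueError on recent Flask releases:
    # the name may not contain a dot, since dots namespace blueprint endpoints.
    prefix_bp = flask.Blueprint(version.replace(".", "_"), __name__)
    print(prefix_bp.name)   # 1_8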
From 2e883c7ae22b5539f11f01859384f5df0b21ea61 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Tue, 20 Jul 2021 11:44:29 +0000
Subject: [PATCH 152/181] Add changelog
---
towncrier/newsfragments/1874.bugfix | 1 +
1 file changed, 1 insertion(+)
create mode 100644 towncrier/newsfragments/1874.bugfix
diff --git a/towncrier/newsfragments/1874.bugfix b/towncrier/newsfragments/1874.bugfix
new file mode 100644
index 00000000..a301835e
--- /dev/null
+++ b/towncrier/newsfragments/1874.bugfix
@@ -0,0 +1 @@
+Remove dot in blueprint name to prevent critical flask startup error in setup.
From 64bf75efb1d97771bf7cb8ad5319d0c130b2605a Mon Sep 17 00:00:00 2001
From: Diman0
Date: Wed, 21 Jul 2021 12:18:14 +0200
Subject: [PATCH 153/181] Added missing extension in conf.py. Added missing
library in requirements.txt. Sphinx is only compatible with docutils<0.17
---
docs/conf.py | 2 +-
docs/requirements.txt | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/docs/conf.py b/docs/conf.py
index 6b19f967..8f174b64 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -4,7 +4,7 @@
import os
-extensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode']
+extensions = ['sphinx.ext.imgmath', 'sphinx.ext.viewcode', 'sphinx_rtd_theme']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 4afd9bb6..f49e26d5 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -2,3 +2,4 @@ recommonmark
Sphinx
sphinx-autobuild
sphinx-rtd-theme
+docutils==0.16
From b140fa54acfcb1394e5f58892218e20d7aa31bae Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Thu, 22 Jul 2021 14:43:03 +0000
Subject: [PATCH 154/181] Update jquery js dependencies in setup
---
setup/templates/steps/compose/02_services.html | 2 +-
setup/templates/steps/config.html | 2 +-
setup/templates/steps/stack/02_services.html | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/setup/templates/steps/compose/02_services.html b/setup/templates/steps/compose/02_services.html
index 20d4d7cb..5118c304 100644
--- a/setup/templates/steps/compose/02_services.html
+++ b/setup/templates/steps/compose/02_services.html
@@ -59,7 +59,7 @@ the security implications caused by such an increase of attack surface.
Fetchmail allows users to retrieve mail from an external mail-server via IMAP/POP3 and puts it in their inbox.
-
+
diff --git a/setup/templates/steps/config.html b/setup/templates/steps/config.html
index 29d8dddd..3769a210 100644
--- a/setup/templates/steps/config.html
+++ b/setup/templates/steps/config.html
@@ -83,7 +83,7 @@ manage your email domains, users, etc.
-
+
diff --git a/setup/templates/steps/stack/02_services.html b/setup/templates/steps/stack/02_services.html
index 3f5186b0..6fce0ae6 100644
--- a/setup/templates/steps/stack/02_services.html
+++ b/setup/templates/steps/stack/02_services.html
@@ -55,7 +55,7 @@ the security implications caused by such an increase of attack surface.
Fetchmail allows users to retrieve mail from an external mail-server via IMAP/POP3 and puts it in their inbox.
-
+
From b0fb9d822bed082ee6630baeaf8526ab75934ff4 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Thu, 22 Jul 2021 14:44:30 +0000
Subject: [PATCH 155/181] Adapt requirements.txt to use pinned versions.
---
setup/requirements.txt | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/setup/requirements.txt b/setup/requirements.txt
index b6bf2120..f2eb2157 100644
--- a/setup/requirements.txt
+++ b/setup/requirements.txt
@@ -1,4 +1,4 @@
-flask
-flask-bootstrap
-redis
-gunicorn
+Flask==1.0.2
+Flask-Bootstrap==3.3.7.1
+gunicorn==19.9.0
+redis==3.2.1
From 67e00bb1e77fa3350610e855c1396952ea76ea21 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman
Date: Thu, 22 Jul 2021 14:56:30 +0000
Subject: [PATCH 156/181] Add changelog
---
towncrier/newsfragments/1880.feature | 1 +
1 file changed, 1 insertion(+)
create mode 100644 towncrier/newsfragments/1880.feature
diff --git a/towncrier/newsfragments/1880.feature b/towncrier/newsfragments/1880.feature
new file mode 100644
index 00000000..212dc906
--- /dev/null
+++ b/towncrier/newsfragments/1880.feature
@@ -0,0 +1 @@
+Update jquery used in setup. Set pinned versions in requirements.txt for setup. This is a security update.
From 9d2629a04e7aaa8c9fc199dac8a6a0631a857fac Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sat, 24 Jul 2021 12:40:38 +0200
Subject: [PATCH 157/181] fix 1884: always lookup a FQDN
---
core/postfix/start.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/core/postfix/start.py b/core/postfix/start.py
index 701efec3..7a2b57a1 100755
--- a/core/postfix/start.py
+++ b/core/postfix/start.py
@@ -38,7 +38,11 @@ os.environ["ANTISPAM_MILTER_ADDRESS"] = system.get_host_address_from_environment
os.environ["LMTP_ADDRESS"] = system.get_host_address_from_environment("LMTP", "imap:2525")
os.environ["OUTCLEAN"] = os.environ["HOSTNAMES"].split(",")[0]
try:
- os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(os.environ["OUTCLEAN"])
+ _to_lookup = os.environ["OUTCLEAN"]
+ # Ensure we lookup a FQDN: @see #1884
+ if not _to_lookup.endswith('.'):
+ _to_lookup += '.'
+ os.environ["OUTCLEAN_ADDRESS"] = system.resolve_hostname(_to_lookup)
except:
os.environ["OUTCLEAN_ADDRESS"] = "10.10.10.10"
From fa915d78624114201074b0bbe919cdb708cca568 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sat, 24 Jul 2021 14:39:40 +0200
Subject: [PATCH 158/181] Fix 1294 ensure podop's socket is owned by postfix
---
core/postfix/start.py | 3 ++-
towncrier/newsfragments/1294.bugfix | 1 +
2 files changed, 3 insertions(+), 1 deletion(-)
create mode 100644 towncrier/newsfragments/1294.bugfix
diff --git a/core/postfix/start.py b/core/postfix/start.py
index 701efec3..125404f6 100755
--- a/core/postfix/start.py
+++ b/core/postfix/start.py
@@ -8,12 +8,13 @@ import logging as log
import sys
from podop import run_server
+from pwd import getpwnam
from socrate import system, conf
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
def start_podop():
- os.setuid(100)
+ os.setuid(getpwnam('postfix').pw_uid)
url = "http://" + os.environ["ADMIN_ADDRESS"] + "/internal/postfix/"
# TODO: Remove verbosity setting from Podop?
run_server(0, "postfix", "/tmp/podop.socket", [
diff --git a/towncrier/newsfragments/1294.bugfix b/towncrier/newsfragments/1294.bugfix
new file mode 100644
index 00000000..68bb7a8a
--- /dev/null
+++ b/towncrier/newsfragments/1294.bugfix
@@ -0,0 +1 @@
+Ensure that the podop socket is always owned by the postfix user (wasn't the case when build using non-standard base images... typically for arm64)
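Hard-coding uid 100 only works when the base image happens to give that uid to postfix; resolving it by name works on any image. A minimal sketch (assumes the process starts with enough privilege to call setuid):

    import os
    from pwd import getpwnam

    def drop_to_postfix():
        """Switch to the postfix user before binding the podop socket."""
        postfix_uid = getpwnam('postfix').pw_uid  # looked up from the system user database
        os.setuid(postfix_uid)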
From 8235085848e1e8586a7455bf214bd240f06d549e Mon Sep 17 00:00:00 2001
From: networkException
Date: Sat, 24 Jul 2021 15:25:59 +0200
Subject: [PATCH 159/181] Docs: Limit fail2ban matches to front container
Previously fail2ban matched against all journal entries. This patch
adds a tag to the logging driver and a journalmatch to the fail2ban
filter documentation, limiting matches to entries from the front container.
---
docs/faq.rst | 3 +++
1 file changed, 3 insertions(+)
diff --git a/docs/faq.rst b/docs/faq.rst
index 5d975532..f38fdca2 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -497,6 +497,8 @@ follow these steps:
logging:
driver: journald
+ options:
+ tag: mailu-front
2. Add the /etc/fail2ban/filter.d/bad-auth.conf
@@ -506,6 +508,7 @@ follow these steps:
[Definition]
failregex = .* client login failed: .+ client:\
ignoreregex =
+ journalmatch = CONTAINER_TAG=mailu-front
3. Add the /etc/fail2ban/jail.d/bad-auth.conf
From 8d9f3214cc5663dc29f7dcf3a03bc373a51d010b Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sat, 24 Jul 2021 15:45:25 +0200
Subject: [PATCH 160/181] Use threads in gunicorn rather than processes
This ensures that we share the auth-cache: it will enable memory savings
and may improve performance when a higher number of cores is available
(a "smarter default").
---
core/admin/start.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/core/admin/start.py b/core/admin/start.py
index 2c925e01..0eff3bbe 100755
--- a/core/admin/start.py
+++ b/core/admin/start.py
@@ -19,7 +19,8 @@ if account is not None and domain is not None and password is not None:
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
- "gunicorn -w 4 -b :80 ",
+ "gunicorn --threads ", str(os.cpu_count()),
+ " -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
From ad1b036f2068f5fe70641e549df34cb0b9cc0b21 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sat, 24 Jul 2021 20:21:38 +0200
Subject: [PATCH 161/181] fix Email class
---
core/admin/mailu/models.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index b5ba29c0..fc9fd623 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -371,8 +371,8 @@ class Email(object):
@classmethod
def __declare_last__(cls):
# gets called after mappings are completed
- sqlalchemy.event.listen(User.localpart, 'set', cls._update_localpart, propagate=True)
- sqlalchemy.event.listen(User.domain_name, 'set', cls._update_domain_name, propagate=True)
+ sqlalchemy.event.listen(cls.localpart, 'set', cls._update_localpart, propagate=True)
+ sqlalchemy.event.listen(cls.domain_name, 'set', cls._update_domain_name, propagate=True)
def sendmail(self, subject, body):
""" send an email to the address """
From 54b46a13c6bc730d5d0faa27d5c540edae9b24d4 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Sun, 25 Jul 2021 15:51:13 +0200
Subject: [PATCH 162/181] save dkim key after creation
---
core/admin/mailu/models.py | 2 +-
core/admin/mailu/ui/views/domains.py | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index b5ba29c0..7a6eb308 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -272,7 +272,7 @@ class Domain(Base):
return dkim.strip_key(dkim_key).decode('utf8')
def generate_dkim_key(self):
- """ generate and activate new DKIM key """
+ """ generate new DKIM key """
self.dkim_key = dkim.gen_key()
def has_email(self, localpart):
diff --git a/core/admin/mailu/ui/views/domains.py b/core/admin/mailu/ui/views/domains.py
index 719d3844..f394ce7d 100644
--- a/core/admin/mailu/ui/views/domains.py
+++ b/core/admin/mailu/ui/views/domains.py
@@ -74,6 +74,8 @@ def domain_details(domain_name):
def domain_genkeys(domain_name):
domain = models.Domain.query.get(domain_name) or flask.abort(404)
domain.generate_dkim_key()
+ models.db.session.add(domain)
+ models.db.session.commit()
return flask.redirect(
flask.url_for(".domain_details", domain_name=domain_name))
From 6856c2c80f1291771a2faed5b72883e62028fcc6 Mon Sep 17 00:00:00 2001
From: Alexander Graf
Date: Fri, 30 Jul 2021 22:26:20 +0200
Subject: [PATCH 163/181] treat localpart case insensitive again
by lowercasing it where necessary
---
core/admin/mailu/models.py | 21 +++++++++++----------
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/core/admin/mailu/models.py b/core/admin/mailu/models.py
index b5ba29c0..4fdf5afb 100644
--- a/core/admin/mailu/models.py
+++ b/core/admin/mailu/models.py
@@ -57,10 +57,9 @@ class IdnaEmail(db.TypeDecorator):
def process_bind_param(self, value, dialect):
""" encode unicode domain part of email address to punycode """
- localpart, domain_name = value.rsplit('@', 1)
+ localpart, domain_name = value.lower().rsplit('@', 1)
if '@' in localpart:
raise ValueError('email local part must not contain "@"')
- domain_name = domain_name.lower()
return f'{localpart}@{idna.encode(domain_name).decode("ascii")}'
def process_result_value(self, value, dialect):
@@ -277,6 +276,7 @@ class Domain(Base):
def has_email(self, localpart):
""" checks if localpart is configured for domain """
+ localpart = localpart.lower()
for email in chain(self.users, self.aliases):
if email.localpart == localpart:
return True
@@ -355,8 +355,8 @@ class Email(object):
@email.setter
def email(self, value):
""" setter for email - sets _email, localpart and domain_name at once """
- self.localpart, self.domain_name = value.rsplit('@', 1)
- self._email = value
+ self._email = value.lower()
+ self.localpart, self.domain_name = self._email.rsplit('@', 1)
@staticmethod
def _update_localpart(target, value, *_):
@@ -389,8 +389,7 @@ class Email(object):
def resolve_domain(cls, email):
""" resolves domain alternative to real domain """
localpart, domain_name = email.rsplit('@', 1) if '@' in email else (None, email)
- alternative = Alternative.query.get(domain_name)
- if alternative:
+ if alternative := Alternative.query.get(domain_name):
domain_name = alternative.domain_name
return (localpart, domain_name)
@@ -401,12 +400,14 @@ class Email(object):
localpart_stripped = None
stripped_alias = None
- if os.environ.get('RECIPIENT_DELIMITER') in localpart:
- localpart_stripped = localpart.rsplit(os.environ.get('RECIPIENT_DELIMITER'), 1)[0]
+ delim = os.environ.get('RECIPIENT_DELIMITER')
+ if delim in localpart:
+ localpart_stripped = localpart.rsplit(delim, 1)[0]
user = User.query.get(f'{localpart}@{domain_name}')
if not user and localpart_stripped:
user = User.query.get(f'{localpart_stripped}@{domain_name}')
+
if user:
email = f'{localpart}@{domain_name}'
@@ -416,15 +417,15 @@ class Email(object):
destination.append(email)
else:
destination = [email]
+
return destination
pure_alias = Alias.resolve(localpart, domain_name)
- stripped_alias = Alias.resolve(localpart_stripped, domain_name)
if pure_alias and not pure_alias.wildcard:
return pure_alias.destination
- if stripped_alias:
+ if stripped_alias := Alias.resolve(localpart_stripped, domain_name):
return stripped_alias.destination
if pure_alias:
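The patch lowercases the address in the email setter and in the IdnaEmail column type, so lookups stay case-insensitive while the domain part is still punycode-encoded. A small sketch of that normalization, using the same idna package the model imports:

    import idna

    def normalize_email(value: str) -> str:
        """Lowercase the address and punycode-encode its domain part."""
        localpart, domain_name = value.lower().rsplit('@', 1)
        if '@' in localpart:
            raise ValueError('email local part must not contain "@"')
        return f'{localpart}@{idna.encode(domain_name).decode("ascii")}'

    print(normalize_email('User@Bücher.example'))   # user@xn--bcher-kva.example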
From f8362d04e4d46c44ab07beffb77cdd041af193c0 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Tue, 3 Aug 2021 13:44:56 +0200
Subject: [PATCH 164/181] Switch to openssl to workaround alpine #12763
---
core/admin/Dockerfile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/admin/Dockerfile b/core/admin/Dockerfile
index 3153bd9e..a0d3d996 100644
--- a/core/admin/Dockerfile
+++ b/core/admin/Dockerfile
@@ -24,9 +24,9 @@ RUN mkdir -p /app
WORKDIR /app
COPY requirements-prod.txt requirements.txt
-RUN apk add --no-cache libressl curl postgresql-libs mariadb-connector-c \
+RUN apk add --no-cache openssl curl postgresql-libs mariadb-connector-c \
&& apk add --no-cache --virtual build-dep \
- libressl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev \
+ openssl-dev libffi-dev python3-dev build-base postgresql-dev mariadb-connector-c-dev \
&& pip3 install -r requirements.txt \
&& apk del --no-cache build-dep
From defea3258ddd3faa15563630b7a10625b5ffa4a6 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Tue, 3 Aug 2021 13:58:54 +0200
Subject: [PATCH 165/181] update arm builds too
---
core/admin/Dockerfile | 2 +-
webmails/rainloop/Dockerfile | 2 +-
webmails/roundcube/Dockerfile | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/core/admin/Dockerfile b/core/admin/Dockerfile
index 0a3b8468..97cf1736 100644
--- a/core/admin/Dockerfile
+++ b/core/admin/Dockerfile
@@ -2,7 +2,7 @@
ARG DISTRO=alpine:3.14
ARG ARCH=""
FROM ${ARCH}node:8 as assets
-COPY --from=balenalib/rpi-alpine:3.10 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
+COPY --from=balenalib/rpi-alpine:3.14 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
COPY package.json ./
RUN npm install
diff --git a/webmails/rainloop/Dockerfile b/webmails/rainloop/Dockerfile
index 9c65f277..9814413d 100644
--- a/webmails/rainloop/Dockerfile
+++ b/webmails/rainloop/Dockerfile
@@ -3,7 +3,7 @@ ARG QEMU=other
# NOTE: only add file if building for arm
FROM ${ARCH}php:7.4-apache as build_arm
-ONBUILD COPY --from=balenalib/rpi-alpine:3.10 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
+ONBUILD COPY --from=balenalib/rpi-alpine:3.14 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
FROM ${ARCH}php:7.4-apache as build_other
diff --git a/webmails/roundcube/Dockerfile b/webmails/roundcube/Dockerfile
index fae02ce0..4d3e36df 100644
--- a/webmails/roundcube/Dockerfile
+++ b/webmails/roundcube/Dockerfile
@@ -2,7 +2,7 @@
ARG ARCH=""
ARG QEMU=other
FROM ${ARCH}php:7.4-apache as build_arm
-ONBUILD COPY --from=balenalib/rpi-alpine:3.10 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
+ONBUILD COPY --from=balenalib/rpi-alpine:3.14 /usr/bin/qemu-arm-static /usr/bin/qemu-arm-static
FROM ${ARCH}php:7.4-apache as build_other
From ccb3631622e535c902d22c7ced649cd374d1ac97 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Tue, 3 Aug 2021 14:01:44 +0200
Subject: [PATCH 166/181] still need pip3
---
docs/Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 11f66b49..29234b19 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -6,7 +6,7 @@ COPY requirements.txt /requirements.txt
ARG version=master
ENV VERSION=$version
-RUN apk add --no-cache nginx curl python3 \
+RUN apk add --no-cache nginx curl python3 py3-pip \
&& pip3 install -r /requirements.txt \
&& mkdir /run/nginx
From 2b63280f59658ca05fd5fb3870126556d6c29004 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Tue, 3 Aug 2021 14:16:14 +0200
Subject: [PATCH 167/181] doh
---
docs/Dockerfile | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 29234b19..253c8420 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,4 +1,4 @@
-ARG DISTRO=alpine:3.14
+ARG DISTRO=python:3.8-alpine3.14
FROM $DISTRO
COPY requirements.txt /requirements.txt
@@ -6,9 +6,8 @@ COPY requirements.txt /requirements.txt
ARG version=master
ENV VERSION=$version
-RUN apk add --no-cache nginx curl python3 py3-pip \
- && pip3 install -r /requirements.txt \
- && mkdir /run/nginx
+RUN apk add --no-cache nginx curl \
+ && pip3 install -r /requirements.txt
COPY ./nginx.conf /etc/nginx/conf.d/default.conf
COPY . /docs
From 609e0f9f7c5b60e5db498d3b3817b41e65c116c5 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Thu, 5 Aug 2021 00:12:37 +0200
Subject: [PATCH 168/181] Env vars are not shared between jobs
---
.github/workflows/CI.yml | 84 ++++++++++++++++++++++++++++++++++++++++
1 file changed, 84 insertions(+)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 55595838..19a445b4 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -91,6 +91,20 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
@@ -124,6 +138,20 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
@@ -157,6 +185,20 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
@@ -190,6 +232,20 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
@@ -223,6 +279,20 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
@@ -256,6 +326,20 @@ jobs:
shell: bash
run: |
echo "BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for branch testing
+ if: ${{ env.BRANCH == 'testing' }}
+ shell: bash
+ env:
+ COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
+ run: |
+ echo "MAILU_VERSION=pr-${COMMIT_MESSAGE//[!0-9]/}" >> $GITHUB_ENV
+ - name: Derive MAILU_VERSION for other branches than testing
+ if: ${{ env.BRANCH != 'testing' }}
+ shell: bash
+ env:
+ MAILU_BRANCH: ${{ env.BRANCH }}
+ run: |
+ echo "MAILU_VERSION=${{ env.MAILU_BRANCH }}" >> $GITHUB_ENV
- name: Create folder for storing images
run: |
sudo mkdir -p /images
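The bash parameter expansion used in the "testing" step above, `${COMMIT_MESSAGE//[!0-9]/}`, deletes every non-digit character from the commit message, so a merge commit referencing a pull request number collapses to that number. A minimal sketch of the same derivation outside the workflow (the commit message below is a hypothetical example):

```bash
#!/bin/bash
# Hypothetical commit message from merging a PR into the testing branch
COMMIT_MESSAGE='Merge pull request #1234 from Mailu/feature-branch'

# Strip everything that is not a digit, exactly as the workflow step does
MAILU_VERSION="pr-${COMMIT_MESSAGE//[!0-9]/}"

echo "$MAILU_VERSION"   # prints: pr-1234
```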
From 98933f9478aab9f8d04cb0afba5eeac2c4dee9ef Mon Sep 17 00:00:00 2001
From: Erriez
Date: Thu, 5 Aug 2021 19:37:06 +0200
Subject: [PATCH 169/181] Optimize docs/Dockerfile
- Convert .rst to .html in temporary python:3.8-alpine3.14 build image
- Remove all unused packages
- Use nginx:1.21-alpine deployment image
---
docs/Dockerfile | 30 +++++++++++++++++++-----------
1 file changed, 19 insertions(+), 11 deletions(-)
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 70c9c3c4..289697da 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,20 +1,28 @@
-ARG DISTRO=alpine:3.8
-FROM $DISTRO
-
-COPY requirements.txt /requirements.txt
+# Convert .rst files to .html in temporary build container
+FROM python:3.8-alpine3.14 AS build
ARG version=master
ENV VERSION=$version
-RUN apk add --no-cache nginx curl python3 \
- && pip3 install -r /requirements.txt \
- && mkdir /run/nginx
-
-COPY ./nginx.conf /etc/nginx/conf.d/default.conf
+COPY requirements.txt /requirements.txt
COPY . /docs
-RUN mkdir -p /build/$VERSION \
- && sphinx-build -W /docs /build/$VERSION
+RUN apk add --no-cache --virtual .build-deps \
+ gcc musl-dev \
+ && pip3 install -r /requirements.txt \
+ && mkdir -p /build/$VERSION \
+ && sphinx-build -W /docs /build/$VERSION \
+ && apk del .build-deps
+
+
+# Build nginx deployment image including generated html
+FROM nginx:1.21-alpine
+
+ARG version=master
+ENV VERSION=$version
+
+COPY ./nginx.conf /etc/nginx/conf.d/default.conf
+COPY --from=build /build/$VERSION /build/$VERSION
EXPOSE 80/tcp
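A usage sketch for the two-stage image above: the `version` build argument selects the output directory for the generated HTML, and the final image ships only nginx plus that directory (image tag and port mapping are illustrative):

```bash
# Build the documentation image for a given version (tag name is illustrative)
docker build --build-arg version=1.8 -t mailu-docs:1.8 ./docs

# Serve the generated HTML locally on port 8080
docker run --rm -p 8080:80 mailu-docs:1.8
```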
From 588904078edd04ed08db309ceb7dd11e751c37b9 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Fri, 6 Aug 2021 16:27:07 +0200
Subject: [PATCH 170/181] Set default of AUTH_RATELIMIT_SUBNET to False.
Increase default AUTH_RATELIMIT value.
---
core/admin/mailu/configuration.py | 4 ++--
docs/configuration.rst | 2 +-
setup/templates/steps/config.html | 2 +-
towncrier/newsfragments/1867.feature | 1 +
4 files changed, 5 insertions(+), 4 deletions(-)
create mode 100644 towncrier/newsfragments/1867.feature
diff --git a/core/admin/mailu/configuration.py b/core/admin/mailu/configuration.py
index 3d1b4fb5..d2d34d88 100644
--- a/core/admin/mailu/configuration.py
+++ b/core/admin/mailu/configuration.py
@@ -34,8 +34,8 @@ DEFAULT_CONFIG = {
'POSTMASTER': 'postmaster',
'TLS_FLAVOR': 'cert',
'INBOUND_TLS_ENFORCE': False,
- 'AUTH_RATELIMIT': '10/minute;1000/hour',
- 'AUTH_RATELIMIT_SUBNET': True,
+ 'AUTH_RATELIMIT': '1000/minute;10000/hour',
+ 'AUTH_RATELIMIT_SUBNET': False,
'DISABLE_STATISTICS': False,
# Mail settings
'DMARC_RUA': None,
diff --git a/docs/configuration.rst b/docs/configuration.rst
index 16ea23c3..d7ebfc11 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -41,7 +41,7 @@ The ``AUTH_RATELIMIT`` holds a security setting for fighting attackers that
try to guess user passwords. The value is the limit of failed authentication attempts
that a single IP address can perform against IMAP, POP and SMTP authentication endpoints.
-If ``AUTH_RATELIMIT_SUBNET`` is ``True`` (which is the default), the ``AUTH_RATELIMIT``
+If ``AUTH_RATELIMIT_SUBNET`` is ``True`` (default: False), the ``AUTH_RATELIMIT``
rules does also apply to auth requests coming from ``SUBNET``, especially for the webmail.
If you disable this, ensure that the rate limit on the webmail is enforced in a different
way (e.g. roundcube plug-in), otherwise an attacker can simply bypass the limit using webmail.
diff --git a/setup/templates/steps/config.html b/setup/templates/steps/config.html
index 3769a210..fee66933 100644
--- a/setup/templates/steps/config.html
+++ b/setup/templates/steps/config.html
@@ -51,7 +51,7 @@ Or in plain english: if receivers start to classify your mail as spam, this post
/ minute
+ value="10000" required > / minute
diff --git a/towncrier/newsfragments/1867.feature b/towncrier/newsfragments/1867.feature
new file mode 100644
index 00000000..fbd3a7d7
--- /dev/null
+++ b/towncrier/newsfragments/1867.feature
@@ -0,0 +1 @@
+Changed default value of AUTH_RATELIMIT_SUBNET to false. Increased default value of the rate limit in setup utility (AUTH_RATELIMIT) to a higher value.
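For illustration, the new defaults above correspond to the following mailu.env entries; the rate limit string is a semicolon-separated list of `count/period` items (values shown are the defaults, not a recommendation):

```
# mailu.env excerpt (illustrative)
AUTH_RATELIMIT=1000/minute;10000/hour
AUTH_RATELIMIT_SUBNET=False
```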
From 4cfa2dbc2addc1bab34f5402f1b52df2d89ec536 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Fri, 6 Aug 2021 16:44:18 +0200
Subject: [PATCH 171/181] Increase width of rate limiting text box.
---
setup/templates/steps/config.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup/templates/steps/config.html b/setup/templates/steps/config.html
index fee66933..72b83915 100644
--- a/setup/templates/steps/config.html
+++ b/setup/templates/steps/config.html
@@ -50,7 +50,7 @@ Or in plain english: if receivers start to classify your mail as spam, this post
-
/ minute
From a7d99bdedd3decc7dc02609fc070337451302eb2 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Fri, 6 Aug 2021 22:35:37 +0200
Subject: [PATCH 172/181] Update CHANGELOG.md and process towncrier
newsfragments.
---
CHANGELOG.md | 32 +++++++++++++++++++++++++---
towncrier/newsfragments/1660.bugfix | 1 -
towncrier/newsfragments/1686.bugfix | 1 -
towncrier/newsfragments/1720.bugfix | 2 --
towncrier/newsfragments/1783.misc | 1 -
towncrier/newsfragments/1837.bugfix | 1 -
towncrier/newsfragments/1841.feature | 1 -
towncrier/newsfragments/1845.feature | 1 -
towncrier/newsfragments/1857.doc | 1 -
towncrier/newsfragments/1861.bugfix | 1 -
towncrier/newsfragments/1867.feature | 1 -
towncrier/newsfragments/1874.bugfix | 1 -
towncrier/newsfragments/1880.feature | 1 -
towncrier/newsfragments/191.bugfix | 1 -
14 files changed, 29 insertions(+), 17 deletions(-)
delete mode 100644 towncrier/newsfragments/1660.bugfix
delete mode 100644 towncrier/newsfragments/1686.bugfix
delete mode 100644 towncrier/newsfragments/1720.bugfix
delete mode 100644 towncrier/newsfragments/1783.misc
delete mode 100644 towncrier/newsfragments/1837.bugfix
delete mode 100644 towncrier/newsfragments/1841.feature
delete mode 100644 towncrier/newsfragments/1845.feature
delete mode 100644 towncrier/newsfragments/1857.doc
delete mode 100644 towncrier/newsfragments/1861.bugfix
delete mode 100644 towncrier/newsfragments/1867.feature
delete mode 100644 towncrier/newsfragments/1874.bugfix
delete mode 100644 towncrier/newsfragments/1880.feature
delete mode 100644 towncrier/newsfragments/191.bugfix
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 579f3e82..09b9f68f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,18 +4,44 @@ Changelog
Upgrade should run fine as long as you generate a new compose or stack
configuration and upgrade your mailu.env.
-Please note that the current 1.8 is what we call a "soft release": It’s there for everyone to see and use, but to limit possible user-impact of this very big release, it’s not yet the default in the setup-utility for new users. When upgrading, please treat it with some care, and be sure to always have backups!
-
There are some changes to the configuration overrides. Override files are now mounted read-only into the containers.
The Dovecot and Postfix overrides are moved in their own sub-directory.
If there are local override files, they will need to be moved from overrides/ to overrides/dovecot and overrides/postfix/.
See https://mailu.io/1.8/faq.html#how-can-i-override-settings for all the mappings.
+<<<<<<< HEAD
Please note that the shipped image for PostgreSQL database is deprecated.
We advise to switch to an external database server.
+=======
+One major change for the docker compose file is that the antispam needs a fixed hostname [#1837](https://github.com/Mailu/Mailu/issues/1837).
+This is handled when you regenerate the docker-compose file. A fixed hostname is required to retain rspamd history.
+
+Please not that the shipped image for PostgreSQL database is deprecated.
+We advise to switch to an external PostgreSQL database server.
+>>>>>>> afaacf5a... Update CHANGELOG.md and process towncrier newsfragments.
-v1.8.0 - 2020-09-28
+1.8.0 - 2021-08-06
+--------------------
+
+- Features: Update version of roundcube webmail and carddav plugin. This is a security update. ([#1841](https://github.com/Mailu/Mailu/issues/1841))
+- Features: Update version of rainloop webmail to 1.16.0. This is a security update. ([#1845](https://github.com/Mailu/Mailu/issues/1845))
+- Features: Changed default value of AUTH_RATELIMIT_SUBNET to false. Increased default value of the rate limit in setup utility (AUTH_RATELIMIT) to a higher value. ([#1867](https://github.com/Mailu/Mailu/issues/1867))
+- Features: Update jquery used in setup. Set pinned versions in requirements.txt for setup. This is a security update. ([#1880](https://github.com/Mailu/Mailu/issues/1880))
+- Bugfixes: Replace PUBLIC_HOSTNAME and PUBLIC_IP in "Received" headers to ensure that no undue spam points are attributed ([#191](https://github.com/Mailu/Mailu/issues/191))
+- Bugfixes: Don't replace nested headers (typically in attached emails) ([#1660](https://github.com/Mailu/Mailu/issues/1660))
+- Bugfixes: Fix letsencrypt access to certbot for the mail-letsencrypt flavour ([#1686](https://github.com/Mailu/Mailu/issues/1686))
+- Bugfixes: Fix CVE-2020-25275 and CVE-2020-24386 by using alpine 3.13 for
+ dovecot which contains a fixed dovecot version. ([#1720](https://github.com/Mailu/Mailu/issues/1720))
+- Bugfixes: Antispam service now uses a static hostname. Rspamd history is only retained when the service has a fixed hostname. ([#1837](https://github.com/Mailu/Mailu/issues/1837))
+- Bugfixes: Fix a bug preventing colons from being used in passwords when using radicale/webdav. ([#1861](https://github.com/Mailu/Mailu/issues/1861))
+- Bugfixes: Remove dot in blueprint name to prevent critical flask startup error in setup. ([#1874](https://github.com/Mailu/Mailu/issues/1874))
+- Bugfixes: fix punycode encoding of domain names ([#1891](https://github.com/Mailu/Mailu/issues/1891))
+- Improved Documentation: Update fail2ban documentation to use systemd backend instead of filepath for journald ([#1857](https://github.com/Mailu/Mailu/issues/1857))
+- Misc: ([#1783](https://github.com/Mailu/Mailu/issues/1783))
+
+
+v1.8.0rc - 2020-09-28
--------------------
- Features: Add support for backward-forwarding using SRS ([#328](https://github.com/Mailu/Mailu/issues/328))
diff --git a/towncrier/newsfragments/1660.bugfix b/towncrier/newsfragments/1660.bugfix
deleted file mode 100644
index a90fb099..00000000
--- a/towncrier/newsfragments/1660.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Don't replace nested headers (typically in attached emails)
diff --git a/towncrier/newsfragments/1686.bugfix b/towncrier/newsfragments/1686.bugfix
deleted file mode 100644
index 932d7d7c..00000000
--- a/towncrier/newsfragments/1686.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix letsencrypt access to certbot for the mail-letsencrypt flavour
diff --git a/towncrier/newsfragments/1720.bugfix b/towncrier/newsfragments/1720.bugfix
deleted file mode 100644
index 0bf2b8e6..00000000
--- a/towncrier/newsfragments/1720.bugfix
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix CVE-2020-25275 and CVE-2020-24386 by using alpine 3.13 for
-dovecot which contains a fixed dovecot version.
diff --git a/towncrier/newsfragments/1783.misc b/towncrier/newsfragments/1783.misc
deleted file mode 100644
index 2ee4c97f..00000000
--- a/towncrier/newsfragments/1783.misc
+++ /dev/null
@@ -1 +0,0 @@
-Switch from client side sessions (cookies) to server-side sessions (Redis). This simplies the security model a lot and allows for an easier recovery should a cookie ever land in the hands of an attacker.
diff --git a/towncrier/newsfragments/1837.bugfix b/towncrier/newsfragments/1837.bugfix
deleted file mode 100644
index dcabcc6b..00000000
--- a/towncrier/newsfragments/1837.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Antispam service now uses a static hostname. Rspamd history is only retained when the service has a fixed hostname.
diff --git a/towncrier/newsfragments/1841.feature b/towncrier/newsfragments/1841.feature
deleted file mode 100644
index c91f805f..00000000
--- a/towncrier/newsfragments/1841.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update version of roundcube webmail and carddav plugin. This is a security update.
\ No newline at end of file
diff --git a/towncrier/newsfragments/1845.feature b/towncrier/newsfragments/1845.feature
deleted file mode 100644
index afde9313..00000000
--- a/towncrier/newsfragments/1845.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update version of rainloop webmail to 1.16.0. This is a security update.
diff --git a/towncrier/newsfragments/1857.doc b/towncrier/newsfragments/1857.doc
deleted file mode 100644
index 06cb91ab..00000000
--- a/towncrier/newsfragments/1857.doc
+++ /dev/null
@@ -1 +0,0 @@
-Update fail2ban documentation to use systemd backend instead of filepath for journald
\ No newline at end of file
diff --git a/towncrier/newsfragments/1861.bugfix b/towncrier/newsfragments/1861.bugfix
deleted file mode 100644
index 1e28d1b6..00000000
--- a/towncrier/newsfragments/1861.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug preventing colons from being used in passwords when using radicale/webdav.
diff --git a/towncrier/newsfragments/1867.feature b/towncrier/newsfragments/1867.feature
deleted file mode 100644
index fbd3a7d7..00000000
--- a/towncrier/newsfragments/1867.feature
+++ /dev/null
@@ -1 +0,0 @@
-Changed default value of AUTH_RATELIMIT_SUBNET to false. Increased default value of the rate limit in setup utility (AUTH_RATELIMIT) to a higher value.
diff --git a/towncrier/newsfragments/1874.bugfix b/towncrier/newsfragments/1874.bugfix
deleted file mode 100644
index a301835e..00000000
--- a/towncrier/newsfragments/1874.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Remove dot in blueprint name to prevent critical flask startup error in setup.
diff --git a/towncrier/newsfragments/1880.feature b/towncrier/newsfragments/1880.feature
deleted file mode 100644
index 212dc906..00000000
--- a/towncrier/newsfragments/1880.feature
+++ /dev/null
@@ -1 +0,0 @@
-Update jquery used in setup. Set pinned versions in requirements.txt for setup. This is a security update.
diff --git a/towncrier/newsfragments/191.bugfix b/towncrier/newsfragments/191.bugfix
deleted file mode 100644
index 185d3074..00000000
--- a/towncrier/newsfragments/191.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Replace PUBLIC_HOSTNAME and PUBLIC_IP in "Received" headers to ensure that no undue spam points are attributed
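The newsfragment deletions above are the normal by-product of folding them into CHANGELOG.md with towncrier; a hedged sketch of the usual invocation (exact flags depend on the towncrier version pinned by the project):

```bash
# Render all pending newsfragments into CHANGELOG.md and remove them (sketch)
towncrier build --version 1.8.0 --yes
```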
From 4b89143362d9ef8cfb985c030047ce74642f7952 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Fri, 6 Aug 2021 23:00:27 +0200
Subject: [PATCH 173/181] Update documentation config and release notes page.
---
docs/conf.py | 2 +-
docs/releases.rst | 65 +++++++++++++++++++++++++++++++++++++++++++++--
2 files changed, 64 insertions(+), 3 deletions(-)
diff --git a/docs/conf.py b/docs/conf.py
index 8f174b64..db7008b3 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -36,7 +36,7 @@ html_context = {
'github_user': 'mailu',
'github_repo': 'mailu',
'github_version': version,
- 'stable_version': '1.7',
+ 'stable_version': '1.8',
'versions': [
('1.5', '/1.5/'),
('1.6', '/1.6/'),
diff --git a/docs/releases.rst b/docs/releases.rst
index 7a15d1fa..7473b033 100644
--- a/docs/releases.rst
+++ b/docs/releases.rst
@@ -1,8 +1,69 @@
Release notes
=============
-Mailu 1.8 - 2020-10-02
-----------------------
+Mailu 1.8 - 2021-08-07
+----------------------
+
+The full 1.8 release is finally ready. There have been some changes in the contributors team: many people have stepped back due to changed priorities in their lives.
+We are very grateful for all their contributions and hope we will see them back again in the future.
+This is the main reason why it took so long for 1.8 to be fully released.
+
+Fortunately more people have decided to join the project. Some very nice contributions have been made which will become part of the next 1.9 release.
+We hope that future Mailu releases can be published more quickly now that we have more active contributors again.
+
+For a list of all changes refer to `CHANGELOG.md` in the root folder of the Mailu GitHub project. Please read the 'Override location changes' section further down this page. It contains important information for anyone who uses the overrides folder.
+
+New Functionality & Improvements
+````````````````````````````````
+
+Here’s a short summary of new features:
+
+- Roundcube and Rainloop have been updated.
+- All dependencies have been updated to the latest security update.
+- Fail2ban documentation has been improved.
+- Switch from client side (cookie) sessions to server side sessions.
+- Full-text-search is back after having been disabled for a while due to nasty bugs. It can still be disabled via the mailu.env file.
+- Tons of documentation improvements, especially geared towards new users.
+- (Experimental) support for different architectures, such as ARM.
+- Improvements around the webmails, such as CardDAV, GPG, a new skin and MySQL support for an updated Roundcube, and an updated Rainloop.
+- Improvements around relaying, such as AUTH LOGIN and non-standard port support.
+- Update to alpine:3.14 as the base image for most containers.
+- Setup warns users about compose-IPv6 deployments which have caused open relays in the past.
+- Improved handling of upper-vs-lowercase aliases and user-addresses.
+- Improved rate-limiting system.
+- Support for SRS.
+- Japanese localisation is now available.
+
+
+Upgrading
+`````````
+
+Upgrade should run fine as long as you generate a new compose or stack
+configuration and upgrade your mailu.env.
+
+Please not that the shipped image for PostgreSQL database is deprecated.
+The shipped image for PostgreSQL is not maintained anymore from release 1.8.
+We recommend switching to an external PostgreSQL image as soon as possible.
+
+Override location changes
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you have regenerated the Docker compose and environment files, there are some changes to the configuration overrides.
+Override files are now mounted read-only into the containers. The Dovecot and Postfix overrides are moved in their own sub-directory. If there are local override files, they will need to be moved from ``overrides/`` to ``overrides/dovecot`` and ``overrides/postfix/``.
+
+Update your DNS SPF Records
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It has become known that the SPF DNS records generated by the admin interface are no longer fully standards-compliant. Please check the DNS records for your domains and compare them to what the new admin interface instructs you to use. In most cases, this should be a simple copy-paste operation.
+
+Fixed hostname for antispam service
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+For history to be retained in Rspamd, the antispam container requires a static hostname. When you re-generate your docker-compose.yml file (or helm chart), this is handled automatically.
+
+
+Mailu 1.8rc - 2020-10-02
+------------------------
Release 1.8 has come a long way again. Due to corona the project slowed down to a crawl. Fortunately new contributors have joined the team what enabled us to still release Mailu 1.8 this year.
From 6581f8f087d62b841e9939ea3cf3814ea9e9a518 Mon Sep 17 00:00:00 2001
From: Dimitri Huisman <52963853+Diman0@users.noreply.github.com>
Date: Fri, 6 Aug 2021 23:17:41 +0200
Subject: [PATCH 174/181] Resolve merge conflict
---
CHANGELOG.md | 12 ++++--------
1 file changed, 4 insertions(+), 8 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 09b9f68f..3ad0061b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,18 +9,14 @@ The Dovecot and Postfix overrides are moved in their own sub-directory.
If there are local override files, they will need to be moved from overrides/ to overrides/dovecot and overrides/postfix/.
See https://mailu.io/1.8/faq.html#how-can-i-override-settings for all the mappings.
-<<<<<<< HEAD
-Please note that the shipped image for PostgreSQL database is deprecated.
-We advise to switch to an external database server.
-=======
-One major change for the docker compose file is that the antispam needs a fixed hostname [#1837](https://github.com/Mailu/Mailu/issues/1837).
-This is handled when you regenerate the docker-compose file. A fixed hostname is required to retain rspamd history.
+One major change for the docker compose file is that the antispam container needs a fixed hostname [#1837](https://github.com/Mailu/Mailu/issues/1837).
+This is handled when you regenerate the docker-compose file. A fixed hostname is required to retain rspamd history.
+This is also handled in the helm-chart repo.
Please not that the shipped image for PostgreSQL database is deprecated.
We advise to switch to an external PostgreSQL database server.
->>>>>>> afaacf5a... Update CHANGELOG.md and process towncrier newsfragments.
-
+
1.8.0 - 2021-08-06
--------------------
From f0997ed0fd5b3be5e8a6964bfd3197ced1aa5ef9 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Sat, 7 Aug 2021 09:12:43 +0200
Subject: [PATCH 175/181] Improved changelog entry
---
CHANGELOG.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3ad0061b..0a128163 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -27,7 +27,7 @@ We advise to switch to an external PostgreSQL database server.
- Bugfixes: Replace PUBLIC_HOSTNAME and PUBLIC_IP in "Received" headers to ensure that no undue spam points are attributed ([#191](https://github.com/Mailu/Mailu/issues/191))
- Bugfixes: Don't replace nested headers (typically in attached emails) ([#1660](https://github.com/Mailu/Mailu/issues/1660))
- Bugfixes: Fix letsencrypt access to certbot for the mail-letsencrypt flavour ([#1686](https://github.com/Mailu/Mailu/issues/1686))
-- Bugfixes: Fix CVE-2020-25275 and CVE-2020-24386 by using alpine 3.13 for
+- Bugfixes: Fix CVE-2020-25275 and CVE-2020-24386 by upgrading alpine for
dovecot which contains a fixed dovecot version. ([#1720](https://github.com/Mailu/Mailu/issues/1720))
- Bugfixes: Antispam service now uses a static hostname. Rspamd history is only retained when the service has a fixed hostname. ([#1837](https://github.com/Mailu/Mailu/issues/1837))
- Bugfixes: Fix a bug preventing colons from being used in passwords when using radicale/webdav. ([#1861](https://github.com/Mailu/Mailu/issues/1861))
From 21e7a338e75437afd59ae4185c87f59d18f8f4e9 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Sat, 7 Aug 2021 09:14:09 +0200
Subject: [PATCH 176/181] Fixed typing error.
---
CHANGELOG.md | 2 +-
docs/releases.rst | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0a128163..82f04acc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,7 +13,7 @@ One major change for the docker compose file is that the antispam container need
This is handled when you regenerate the docker-compose file. A fixed hostname is required to retain rspamd history.
This is also handled in the helm-chart repo.
-Please not that the shipped image for PostgreSQL database is deprecated.
+Please note that the shipped image for PostgreSQL database is deprecated.
We advise to switch to an external PostgreSQL database server.
diff --git a/docs/releases.rst b/docs/releases.rst
index 7473b033..3ae25f48 100644
--- a/docs/releases.rst
+++ b/docs/releases.rst
@@ -41,7 +41,7 @@ Upgrading
Upgrade should run fine as long as you generate a new compose or stack
configuration and upgrade your mailu.env.
-Please not that the shipped image for PostgreSQL database is deprecated.
+Please note that the shipped image for PostgreSQL database is deprecated.
The shipped image for PostgreSQL is not maintained anymore from release 1.8.
We recommend switching to an external PostgreSQL image as soon as possible.
From 14a18715111e4d21fd9cac0c7e18a293771f3fd0 Mon Sep 17 00:00:00 2001
From: Diman0
Date: Sat, 7 Aug 2021 09:25:40 +0200
Subject: [PATCH 177/181] enhanced security changelog entry and added
recommendation to recreate secret_key
---
CHANGELOG.md | 11 ++++++++++-
docs/releases.rst | 14 +++++++++++++-
2 files changed, 23 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 82f04acc..da945c72 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,15 @@ One major change for the docker compose file is that the antispam container need
This is handled when you regenerate the docker-compose file. A fixed hostname is required to retain rspamd history.
This is also handled in the helm-chart repo.
+Improvements have been made to protect against session-fixation attacks.
+To be fully protected, it is required to change your SECRET_KEY in mailu.env after upgrading.
+A new SECRET_KEY is generated when you recreate your docker-compose.yml & mailu.env files via setup.mailu.io.
+
+The SECRET_KEY is an uppercase alphanumeric string of length 16. You can manually create such a string via
+```cat /dev/urandom | tr -dc 'A-Z0-9' | fold -w ${1:-16} | head -n 1```
+
+After changing mailu.env, it is required to recreate all containers for the changes to be propagated.
+
Please note that the shipped image for PostgreSQL database is deprecated.
We advise to switch to an external PostgreSQL database server.
@@ -34,7 +43,7 @@ We advise to switch to an external PostgreSQL database server.
- Bugfixes: Remove dot in blueprint name to prevent critical flask startup error in setup. ([#1874](https://github.com/Mailu/Mailu/issues/1874))
- Bugfixes: fix punycode encoding of domain names ([#1891](https://github.com/Mailu/Mailu/issues/1891))
- Improved Documentation: Update fail2ban documentation to use systemd backend instead of filepath for journald ([#1857](https://github.com/Mailu/Mailu/issues/1857))
-- Misc: ([#1783](https://github.com/Mailu/Mailu/issues/1783))
+- Misc: Switch from client side (cookie) sessions to server side sessions and protect against session-fixation attacks. We recommend that you change your SECRET_KEY after upgrading. ([#1783](https://github.com/Mailu/Mailu/issues/1783))
v1.8.0rc - 2020-09-28
diff --git a/docs/releases.rst b/docs/releases.rst
index 3ae25f48..6c672538 100644
--- a/docs/releases.rst
+++ b/docs/releases.rst
@@ -21,7 +21,7 @@ Here’s a short summary of new features:
- Roundcube and Rainloop have been updated.
- All dependencies have been updated to the latest security update.
- Fail2ban documentation has been improved.
-- Switch from client side (cookie) sessions to server side sessions.
+- Switch from client side (cookie) sessions to server side sessions and protect against session-fixation attacks. We recommend that you change your SECRET_KEY after upgrading.
- Full-text-search is back after having been disabled for a while due to nasty bugs. It can still be disabled via the mailu.env file.
- Tons of documentation improvements, especially geared towards new users.
- (Experimental) support for different architectures, such as ARM.
@@ -51,6 +51,18 @@ Override location changes
If you have regenerated the Docker compose and environment files, there are some changes to the configuration overrides.
Override files are now mounted read-only into the containers. The Dovecot and Postfix overrides are moved in their own sub-directory. If there are local override files, they will need to be moved from ``overrides/`` to ``overrides/dovecot`` and ``overrides/postfix/``.
+Recreate SECRET_KEY after upgrading
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Improvements have been made to protect against session-fixation attacks.
+To be fully protected, it is required to change your SECRET_KEY in mailu.env after upgrading.
+A new SECRET_KEY is generated when you recreate your docker-compose.yml & mailu.env files via setup.mailu.io.
+
+The SECRET_KEY is an uppercase alphanumeric string of length 16. You can manually create such a string via
+```cat /dev/urandom | tr -dc 'A-Z0-9' | fold -w ${1:-16} | head -n 1```
+
+After changing mailu.env, it is required to recreate all containers for the changes to be propagated.
+
Update your DNS SPF Records
^^^^^^^^^^^^^^^^^^^^^^^^^^^
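As an aside, the `${1:-16}` in the quoted pipeline is a positional-parameter default that only reads naturally inside a script; run interactively it still falls back to 16, but an equivalent, simpler sketch is:

```bash
# Generate a 16-character uppercase alphanumeric SECRET_KEY (sketch)
tr -dc 'A-Z0-9' < /dev/urandom | head -c 16; echo
```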
From 3157fc3623424f7af991f2de86981c2aeced4cca Mon Sep 17 00:00:00 2001
From: Diman0
Date: Sat, 7 Aug 2021 09:27:47 +0200
Subject: [PATCH 178/181] Give docker containers in each test one more minute
for starting.
---
.github/workflows/CI.yml | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 19a445b4..e2a535dd 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -121,7 +121,7 @@ jobs:
- name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: Test core suite
- run: python tests/compose/test.py core 1
+ run: python tests/compose/test.py core 2
env:
MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
@@ -168,7 +168,7 @@ jobs:
- name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: Test fetch
- run: python tests/compose/test.py fetchmail 1
+ run: python tests/compose/test.py fetchmail 2
env:
MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
@@ -215,7 +215,7 @@ jobs:
- name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: Test clamvav
- run: python tests/compose/test.py filters 2
+ run: python tests/compose/test.py filters 3
env:
MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
@@ -262,7 +262,7 @@ jobs:
- name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: Test rainloop
- run: python tests/compose/test.py rainloop 1
+ run: python tests/compose/test.py rainloop 2
env:
MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
@@ -309,7 +309,7 @@ jobs:
- name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: Test roundcube
- run: python tests/compose/test.py roundcube 1
+ run: python tests/compose/test.py roundcube 2
env:
MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
@@ -356,7 +356,7 @@ jobs:
- name: Copy all certs
run: sudo -- sh -c 'mkdir -p /mailu && cp -r tests/certs /mailu && chmod 600 /mailu/certs/*'
- name: Test webdav
- run: python tests/compose/test.py webdav 1
+ run: python tests/compose/test.py webdav 2
env:
MAILU_VERSION: ${{ env.MAILU_VERSION }}
TRAVIS_BRANCH: ${{ env.BRANCH }}
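Judging from the commit subject, the trailing argument of tests/compose/test.py is the number of minutes the harness waits for the containers to come up before running the tests. A hedged local sketch, mirroring the environment variables the CI step exports:

```bash
# Run the core test suite, allowing 2 minutes for the stack to start (sketch)
export MAILU_VERSION=master TRAVIS_BRANCH=master
python tests/compose/test.py core 2
```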
From 6b3c208fc9e4166b45c7ef21eacf75a86239bf24 Mon Sep 17 00:00:00 2001
From: Erriez
Date: Sun, 8 Aug 2021 14:50:20 +0200
Subject: [PATCH 179/181] Update Alpine version from 3.10 to 3.14
---
tests/build_arm.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/build_arm.sh b/tests/build_arm.sh
index 04836ddb..32dba421 100755
--- a/tests/build_arm.sh
+++ b/tests/build_arm.sh
@@ -1,6 +1,6 @@
#!/bin/bash -x
-ALPINE_VER="3.10"
+ALPINE_VER="3.14"
DISTRO="balenalib/rpi-alpine:$ALPINE_VER"
# Used for webmails
QEMU="arm"
From bcdc137677efef597ff8729ca40626924ee042f9 Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sun, 8 Aug 2021 19:18:33 +0200
Subject: [PATCH 180/181] Alpine has removed support for btree and hash
---
core/postfix/conf/main.cf | 4 ++--
core/postfix/conf/sasl_passwd | 3 ++-
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/core/postfix/conf/main.cf b/core/postfix/conf/main.cf
index 8f35f609..9cd4010e 100644
--- a/core/postfix/conf/main.cf
+++ b/core/postfix/conf/main.cf
@@ -32,7 +32,7 @@ mydestination =
relayhost = {{ RELAYHOST }}
{% if RELAYUSER %}
smtp_sasl_auth_enable = yes
-smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd
+smtp_sasl_password_maps = lmdb:/etc/postfix/sasl_passwd
smtp_sasl_security_options = noanonymous
{% endif %}
@@ -58,7 +58,7 @@ tls_ssl_options = NO_COMPRESSION
smtp_tls_security_level = {{ OUTBOUND_TLS_LEVEL|default('may') }}
smtp_tls_mandatory_protocols = !SSLv2, !SSLv3
smtp_tls_protocols =!SSLv2,!SSLv3
-smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache
+smtp_tls_session_cache_database = lmdb:${data_directory}/smtp_scache
###############
# Virtual
diff --git a/core/postfix/conf/sasl_passwd b/core/postfix/conf/sasl_passwd
index e19d0657..1e32322a 100644
--- a/core/postfix/conf/sasl_passwd
+++ b/core/postfix/conf/sasl_passwd
@@ -1 +1,2 @@
-{{ RELAYHOST }} {{ RELAYUSER }}:{{ RELAYPASSWORD }}
\ No newline at end of file
+{{ RELAYHOST }} {{ RELAYUSER }}:{{ RELAYPASSWORD }}
+
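A hedged verification sketch from inside the postfix container: `postconf -m` lists the lookup-table types compiled into the Alpine build, and `postmap` regenerates the relay credentials map in the new format:

```bash
# Confirm which map types this postfix build supports (lmdb should be listed)
postconf -m

# Rebuild the SASL credentials map using lmdb instead of hash
postmap lmdb:/etc/postfix/sasl_passwd
```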
From 1029cad04807371ac11133a43d95307005c003dc Mon Sep 17 00:00:00 2001
From: Florent Daigniere
Date: Sun, 8 Aug 2021 19:21:55 +0200
Subject: [PATCH 181/181] towncrier
---
towncrier/newsfragments/1917.bugfix | 1 +
1 file changed, 1 insertion(+)
create mode 100644 towncrier/newsfragments/1917.bugfix
diff --git a/towncrier/newsfragments/1917.bugfix b/towncrier/newsfragments/1917.bugfix
new file mode 100644
index 00000000..68187d61
--- /dev/null
+++ b/towncrier/newsfragments/1917.bugfix
@@ -0,0 +1 @@
+Alpine has removed support for btree and hash in postfix... please use lmdb instead