Mailu — mirror of https://github.com/Mailu/Mailu.git
commit 68caf50154 (parent 1c9abf6e48): new import/export using marshmallow
@@ -4,7 +4,6 @@
 import sys
 import os
 import socket
-import json
 import logging
 import uuid

@@ -20,7 +19,7 @@ from flask.cli import FlaskGroup, with_appcontext
 from marshmallow.exceptions import ValidationError

 from . import models
-from .schemas import MailuSchema, get_schema
+from .schemas import MailuSchema, get_schema, get_fieldspec, colorize, RenderJSON, HIDDEN


 db = models.db

@@ -182,7 +181,7 @@ def user_import(localpart, domain_name, password_hash, hash_scheme = None):
     db.session.commit()


-# TODO: remove this deprecated function
+# TODO: remove deprecated config_update function?
 @mailu.command()
 @click.option('-v', '--verbose')
 @click.option('-d', '--delete-objects')

@@ -324,17 +323,16 @@ def config_update(verbose=False, delete_objects=False):
     db.session.commit()


-SECTIONS = {'domains', 'relays', 'users', 'aliases'}


 @mailu.command()
-@click.option('-v', '--verbose', count=True, help='Increase verbosity')
-@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors')
-@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config')
-@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made')
+@click.option('-v', '--verbose', count=True, help='Increase verbosity.')
+@click.option('-s', '--secrets', is_flag=True, help='Show secret attributes in messages.')
+@click.option('-q', '--quiet', is_flag=True, help='Quiet mode - only show errors.')
+@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
+@click.option('-u', '--update', is_flag=True, help='Update mode - merge input with existing config.')
+@click.option('-n', '--dry-run', is_flag=True, help='Perform a trial run with no changes made.')
 @click.argument('source', metavar='[FILENAME|-]', type=click.File(mode='r'), default=sys.stdin)
 @with_appcontext
-def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
+def config_import(verbose=0, secrets=False, quiet=False, color=False, update=False, dry_run=False, source=None):
     """ Import configuration as YAML or JSON from stdin or file
     """

@@ -344,12 +342,19 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
     # 2 : also show secrets
     # 3 : also show input data
     # 4 : also show sql queries
+    # 5 : also show tracebacks

     if quiet:
         verbose = -1

+    color_cfg = {
+        'color': color or sys.stdout.isatty(),
+        'lexer': 'python',
+        'strip': True,
+    }

     counter = Counter()
-    dumper = {}
+    logger = {}

     def format_errors(store, path=None):
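Note: the new color_cfg dict is built once per run and then splatted into colorize() (added to the
schemas module further down in this commit) whenever an object is logged. A minimal sketch of that
call pattern with a made-up message, not part of the commit itself:

    import sys
    # mirrors the dict built in config_import above
    color_cfg = {'color': sys.stdout.isatty(), 'lexer': 'python', 'strip': True}
    message = repr({'name': 'example.com', 'comment': ''})     # hypothetical dump of a domain
    print('Modified domain:', colorize(message, **color_cfg))  # colorize() is a no-op when color is False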
@@ -387,19 +392,26 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
                 last = action
             changes.append(f'{what}({count})')
         else:
-            changes = 'no changes.'
+            changes = ['No changes.']
         return chain(message, changes)

     def log(action, target, message=None):
         if message is None:
-            message = json.dumps(dumper[target.__class__].dump(target), ensure_ascii=False)
-        print(f'{action} {target.__table__}: {message}')
+            # TODO: convert nested OrderedDict to dict
+            # see: flask mailu config-import -nvv yaml/dump4.yaml
+            try:
+                message = dict(logger[target.__class__].dump(target))
+            except KeyError:
+                message = target
+        if not isinstance(message, str):
+            message = repr(message)
+        print(f'{action} {target.__table__}: {colorize(message, **color_cfg)}')

     def listen_insert(mapper, connection, target): # pylint: disable=unused-argument
         """ callback function to track import """
-        counter.update([('Added', target.__table__.name)])
+        counter.update([('Created', target.__table__.name)])
         if verbose >= 1:
-            log('Added', target)
+            log('Created', target)

     def listen_update(mapper, connection, target): # pylint: disable=unused-argument
         """ callback function to track import """

@@ -407,32 +419,32 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
         changed = {}
         inspection = sqlalchemy.inspect(target)
         for attr in sqlalchemy.orm.class_mapper(target.__class__).column_attrs:
-            if getattr(inspection.attrs, attr.key).history.has_changes():
-                if sqlalchemy.orm.attributes.get_history(target, attr.key)[2]:
-                    before = sqlalchemy.orm.attributes.get_history(target, attr.key)[2].pop()
-                    after = getattr(target, attr.key)
-                    # only remember changed keys
-                    if before != after and (before or after):
-                        if verbose >= 1:
-                            changed[str(attr.key)] = (before, after)
-            else:
-                break
+            history = getattr(inspection.attrs, attr.key).history
+            if history.has_changes() and history.deleted:
+                before = history.deleted[-1]
+                after = getattr(target, attr.key)
+                # TODO: remove special handling of "comment" after modifying model
+                if attr.key == 'comment' and not before and not after:
+                    pass
+                # only remember changed keys
+                elif before != after:
+                    if verbose >= 1:
+                        changed[str(attr.key)] = (before, after)
+                    else:
+                        break

         if verbose >= 1:
             # use schema with dump_context to hide secrets and sort keys
-            primary = json.dumps(str(target), ensure_ascii=False)
-            dumped = get_schema(target)(only=changed.keys(), context=dump_context).dump(target)
+            dumped = get_schema(target)(only=changed.keys(), context=diff_context).dump(target)
             for key, value in dumped.items():
                 before, after = changed[key]
-                if value == '<hidden>':
-                    before = '<hidden>' if before else before
-                    after = '<hidden>' if after else after
+                if value == HIDDEN:
+                    before = HIDDEN if before else before
+                    after = HIDDEN if after else after
                 else:
-                    # TODO: use schema to "convert" before value?
+                    # TODO: need to use schema to "convert" before value?
                     after = value
-                    before = json.dumps(before, ensure_ascii=False)
-                    after = json.dumps(after, ensure_ascii=False)
-                log('Modified', target, f'{primary} {key}: {before} -> {after}')
+                log('Modified', target, f'{str(target)!r} {key}: {before!r} -> {after!r}')

         if changed:
             counter.update([('Modified', target.__table__.name)])
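Note: the rewritten listen_update reads pending changes from SQLAlchemy's attribute history instead
of calling get_history() twice per column. A self-contained sketch of that mechanism (the helper
name is made up; any mapped object works):

    import sqlalchemy

    def changed_columns(obj):
        """ return {column: (old, new)} for unflushed changes on a loaded object """
        changes = {}
        inspection = sqlalchemy.inspect(obj)
        for attr in sqlalchemy.orm.class_mapper(obj.__class__).column_attrs:
            history = getattr(inspection.attrs, attr.key).history
            if history.has_changes() and history.deleted:
                changes[attr.key] = (history.deleted[-1], getattr(obj, attr.key))
        return changes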
@@ -443,47 +455,60 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
         if verbose >= 1:
             log('Deleted', target)

-    # this listener should not be necessary, when:
-    # dkim keys should be stored in database and it should be possible to store multiple
-    # keys per domain. the active key would be also stored on disk on commit.
+    # TODO: this listener will not be necessary, if dkim keys would be stored in database
+    _dedupe_dkim = set()
     def listen_dkim(session, flush_context): # pylint: disable=unused-argument
         """ callback function to track import """
         for target in session.identity_map.values():
-            if not isinstance(target, models.Domain):
+            # look at Domains originally loaded from db
+            if not isinstance(target, models.Domain) or not target._sa_instance_state.load_path:
                 continue
-            primary = json.dumps(str(target), ensure_ascii=False)
             before = target._dkim_key_on_disk
             after = target._dkim_key
-            if before != after and (before or after):
-                if verbose >= 2:
+            if before != after:
+                if secrets:
                     before = before.decode('ascii', 'ignore')
                     after = after.decode('ascii', 'ignore')
                 else:
-                    before = '<hidden>' if before else ''
-                    after = '<hidden>' if after else ''
-                before = json.dumps(before, ensure_ascii=False)
-                after = json.dumps(after, ensure_ascii=False)
-                log('Modified', target, f'{primary} dkim_key: {before} -> {after}')
-                counter.update([('Modified', target.__table__.name)])
+                    before = HIDDEN if before else ''
+                    after = HIDDEN if after else ''
+                # "de-dupe" messages; this event is fired at every flush
+                if not (target, before, after) in _dedupe_dkim:
+                    _dedupe_dkim.add((target, before, after))
+                    counter.update([('Modified', target.__table__.name)])
+                    if verbose >= 1:
+                        log('Modified', target, f'{str(target)!r} dkim_key: {before!r} -> {after!r}')

-    def track_serialize(self, item):
+    def track_serialize(obj, item):
         """ callback function to track import """
-        log('Handling', self.opts.model, item)
+        # hide secrets
+        data = logger[obj.opts.model].hide(item)
+        if 'hash_password' in data:
+            data['password'] = HIDDEN
+        if 'fetches' in data:
+            for fetch in data['fetches']:
+                fetch['password'] = HIDDEN
+        log('Handling', obj.opts.model, data)

     # configure contexts
-    dump_context = {
-        'secrets': verbose >= 2,
+    diff_context = {
+        'full': True,
+        'secrets': secrets,
+    }
+    log_context = {
+        'secrets': secrets,
     }
     load_context = {
-        'callback': track_serialize if verbose >= 3 else None,
-        'clear': not update,
         'import': True,
+        'update': update,
+        'clear': not update,
+        'callback': track_serialize if verbose >= 2 else None,
     }

     # register listeners
     for schema in get_schema():
         model = schema.Meta.model
-        dumper[model] = schema(context=dump_context)
+        logger[model] = schema(context=log_context)
         sqlalchemy.event.listen(model, 'after_insert', listen_insert)
         sqlalchemy.event.listen(model, 'after_update', listen_update)
         sqlalchemy.event.listen(model, 'after_delete', listen_delete)

@@ -491,18 +516,24 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):
     # special listener for dkim_key changes
     sqlalchemy.event.listen(db.session, 'after_flush', listen_dkim)

-    if verbose >= 4:
+    if verbose >= 3:
         logging.basicConfig()
         logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

     try:
         with models.db.session.no_autoflush:
-            config = MailuSchema(only=SECTIONS, context=load_context).loads(source)
+            config = MailuSchema(only=MailuSchema.Meta.order, context=load_context).loads(source)
     except ValidationError as exc:
         raise click.ClickException(format_errors(exc.messages)) from exc
     except Exception as exc:
-        # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
-        raise click.ClickException(f'[{exc.__class__.__name__}] {" ".join(str(exc).split())}') from exc
+        if verbose >= 5:
+            raise
+        else:
+            # (yaml.scanner.ScannerError, UnicodeDecodeError, ...)
+            raise click.ClickException(
+                f'[{exc.__class__.__name__}] '
+                f'{" ".join(str(exc).split())}'
+            ) from exc

     # flush session to show/count all changes
     if dry_run or verbose >= 1:

@@ -510,53 +541,47 @@ def config_import(verbose=0, quiet=False, update=False, dry_run=False, source=None):

     # check for duplicate domain names
     dup = set()
-    for fqdn in chain(db.session.query(models.Domain.name),
-                      db.session.query(models.Alternative.name),
-                      db.session.query(models.Relay.name)):
+    for fqdn in chain(
+        db.session.query(models.Domain.name),
+        db.session.query(models.Alternative.name),
+        db.session.query(models.Relay.name)
+    ):
         if fqdn in dup:
             raise click.ClickException(f'[ValidationError] Duplicate domain name: {fqdn}')
         dup.add(fqdn)

-    # TODO: implement special update "items"
-    # -pkey: which - remove item "which"
-    # -key: null or [] or {} - set key to default
-    # -pkey: null or [] or {} - remove all existing items in this list

     # don't commit when running dry
     if dry_run:
-        db.session.rollback()
         if not quiet:
             print(*format_changes('Dry run. Not commiting changes.'))
-        # TODO: remove debug
-        print(MailuSchema().dumps(config))
+        db.session.rollback()
     else:
-        db.session.commit()
         if not quiet:
-            print(*format_changes('Commited changes.'))
+            print(*format_changes('Committing changes.'))
+        db.session.commit()


 @mailu.command()
-@click.option('-f', '--full', is_flag=True, help='Include attributes with default value')
+@click.option('-f', '--full', is_flag=True, help='Include attributes with default value.')
 @click.option('-s', '--secrets', is_flag=True,
-              help='Include secret attributes (dkim-key, passwords)')
-@click.option('-d', '--dns', is_flag=True, help='Include dns records')
+              help='Include secret attributes (dkim-key, passwords).')
+@click.option('-c', '--color', is_flag=True, help='Force colorized output.')
+@click.option('-d', '--dns', is_flag=True, help='Include dns records.')
 @click.option('-o', '--output-file', 'output', default=sys.stdout, type=click.File(mode='w'),
-              help='save yaml to file')
-@click.option('-j', '--json', 'as_json', is_flag=True, help='Dump in josn format')
-@click.argument('sections', nargs=-1)
+              help='Save configuration to file.')
+@click.option('-j', '--json', 'as_json', is_flag=True, help='Export configuration in json format.')
+@click.argument('only', metavar='[FILTER]...', nargs=-1)
 @with_appcontext
-def config_export(full=False, secrets=False, dns=False, output=None, as_json=False, sections=None):
+def config_export(full=False, secrets=False, color=False, dns=False, output=None, as_json=False, only=None):
     """ Export configuration as YAML or JSON to stdout or file
     """

-    if sections:
-        for section in sections:
-            if section not in SECTIONS:
-                print(f'[ERROR] Unknown section: {section}')
-                raise click.exceptions.Exit(1)
-        sections = set(sections)
+    if only:
+        for spec in only:
+            if spec.split('.', 1)[0] not in MailuSchema.Meta.order:
+                raise click.ClickException(f'[ERROR] Unknown section: {spec}')
     else:
-        sections = SECTIONS
+        only = MailuSchema.Meta.order

     context = {
         'full': full,

@@ -564,13 +589,20 @@ def config_export(full=False, secrets=False, dns=False, output=None, as_json=False, sections=None):
         'dns': dns,
     }

-    if as_json:
-        schema = MailuSchema(only=sections, context=context)
-        schema.opts.render_module = json
-        print(schema.dumps(models.MailuConfig(), separators=(',',':')), file=output)
-    else:
-        MailuSchema(only=sections, context=context).dumps(models.MailuConfig(), output)
+    schema = MailuSchema(only=only, context=context)
+    color_cfg = {'color': color or output.isatty()}
+
+    if as_json:
+        schema.opts.render_module = RenderJSON
+        color_cfg['lexer'] = 'json'
+        color_cfg['strip'] = True
+
+    try:
+        print(colorize(schema.dumps(models.MailuConfig()), **color_cfg), file=output)
+    except ValueError as exc:
+        if spec := get_fieldspec(exc):
+            raise click.ClickException(f'[ERROR] Invalid filter: {spec}') from exc
+        raise


 @mailu.command()
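Note: config-export now always builds a single MailuSchema and only swaps the render module for
JSON output. A hedged sketch of the equivalent calls (the 'user' filter and the context values are
illustrative; valid section names come from MailuSchema.Meta.order, which is not part of this hunk):

    context = {'full': True, 'secrets': False, 'dns': False}
    schema = MailuSchema(only=['user'], context=context)
    schema.opts.render_module = RenderJSON          # default render module is YAML
    print(colorize(schema.dumps(models.MailuConfig()), color=False))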
@@ -19,6 +19,7 @@ import dns

 from flask import current_app as app
 from sqlalchemy.ext import declarative
+from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.inspection import inspect
 from werkzeug.utils import cached_property

@@ -121,6 +122,36 @@ class Base(db.Model):
     updated_at = db.Column(db.Date, nullable=True, onupdate=date.today)
     comment = db.Column(db.String(255), nullable=True, default='')

+    def __str__(self):
+        pkey = self.__table__.primary_key.columns.values()[0].name
+        if pkey == 'email':
+            # ugly hack for email declared attr. _email is not always up2date
+            return str(f'{self.localpart}@{self.domain_name}')
+        elif pkey in {'name', 'email'}:
+            return str(getattr(self, pkey, None))
+        else:
+            return self.__repr__()
+        return str(getattr(self, self.__table__.primary_key.columns.values()[0].name))
+
+    def __repr__(self):
+        return f'<{self.__class__.__name__} {str(self)!r}>'
+
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            pkey = self.__table__.primary_key.columns.values()[0].name
+            this = getattr(self, pkey, None)
+            other = getattr(other, pkey, None)
+            return this is not None and other is not None and str(this) == str(other)
+        else:
+            return NotImplemented
+
+    def __hash__(self):
+        primary = getattr(self, self.__table__.primary_key.columns.values()[0].name)
+        if primary is None:
+            return NotImplemented
+        else:
+            return hash(primary)

 # Many-to-many association table for domain managers
 managers = db.Table('manager', Base.metadata,
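Note: with these Base methods, identity is defined by the (single-column) primary key, so two
detached instances compare equal as soon as their keys stringify identically. Illustrative sketch
(Domain's primary key is its name):

    a = models.Domain(name='example.com')
    b = models.Domain(name='example.com')
    assert a == b                      # same primary key -> equal
    assert hash(a) == hash(b)          # hashable by primary key
    assert str(a) == 'example.com'
    assert repr(a) == "<Domain 'example.com'>"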
@@ -261,19 +292,6 @@ class Domain(Base):
         except dns.exception.DNSException:
             return False

-    def __str__(self):
-        return str(self.name)
-
-    def __eq__(self, other):
-        if isinstance(other, self.__class__):
-            return str(self.name) == str(other.name)
-        else:
-            return NotImplemented
-
-    def __hash__(self):
-        return hash(str(self.name))

 class Alternative(Base):
     """ Alternative name for a served domain.

@@ -287,9 +305,6 @@ class Alternative(Base):
     domain = db.relationship(Domain,
         backref=db.backref('alternatives', cascade='all, delete-orphan'))

-    def __str__(self):
-        return str(self.name)

 class Relay(Base):
     """ Relayed mail domain.

@@ -302,9 +317,6 @@ class Relay(Base):
     # TODO: String(80) is too small?
     smtp = db.Column(db.String(80), nullable=True)

-    def __str__(self):
-        return str(self.name)

 class Email(object):
     """ Abstraction for an email address (localpart and domain).

@@ -312,11 +324,11 @@ class Email(object):

     # TODO: validate max. total length of address (<=254)

-    # TODO: String(80) is too large (>64)?
+    # TODO: String(80) is too large (64)?
     localpart = db.Column(db.String(80), nullable=False)

     @declarative.declared_attr
-    def domain_name(self):
+    def domain_name(cls):
         """ the domain part of the email address """
         return db.Column(IdnaDomain, db.ForeignKey(Domain.name),
             nullable=False, default=IdnaDomain)

@@ -325,13 +337,33 @@ class Email(object):
     # It is however very useful for quick lookups without joining tables,
     # especially when the mail server is reading the database.
     @declarative.declared_attr
-    def email(self):
+    def _email(cls):
         """ the complete email address (localpart@domain) """
-        updater = lambda ctx: '{localpart}@{domain_name}'.format(**ctx.current_parameters)
-        return db.Column(IdnaEmail,
-            primary_key=True, nullable=False,
-            default=updater
-        )
+        def updater(ctx):
+            key = f'{cls.__tablename__}_email'
+            if key in ctx.current_parameters:
+                return ctx.current_parameters[key]
+            return '{localpart}@{domain_name}'.format(**ctx.current_parameters)
+
+        return db.Column('email', IdnaEmail, primary_key=True, nullable=False, onupdate=updater)
+
+    # We need to keep email, localpart and domain_name in sync.
+    # But IMHO using email as primary key was not a good idea in the first place.
+    @hybrid_property
+    def email(self):
+        """ getter for email - gets _email """
+        return self._email
+
+    @email.setter
+    def email(self, value):
+        """ setter for email - sets _email, localpart and domain_name at once """
+        self.localpart, self.domain_name = value.rsplit('@', 1)
+        self._email = value
+
+    # hack for email declared attr - when _email is not updated yet
+    def __str__(self):
+        return str(f'{self.localpart}@{self.domain_name}')
+
     def sendmail(self, subject, body):
         """ send an email to the address """
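Note: email is now a hybrid property backed by the stored _email column, so assigning it keeps
localpart and domain_name in sync without touching the database. Sketch (values are examples):

    user = models.User()
    user.email = 'postmaster@example.com'
    assert user.localpart == 'postmaster'
    assert user.domain_name == 'example.com'
    assert str(user) == 'postmaster@example.com'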
@@ -391,9 +423,6 @@ class Email(object):

         return None

-    def __str__(self):
-        return str(self.email)

 class User(Base, Email):
     """ A user is an email address that has a password to access a mailbox.

@@ -435,12 +464,10 @@ class User(Base, Email):
     is_active = True
     is_anonymous = False

-    # TODO: remove unused user.get_id()
     def get_id(self):
         """ return users email address """
         return self.email

-    # TODO: remove unused user.destination
     @property
     def destination(self):
         """ returns comma separated string of destinations """

@@ -471,17 +498,20 @@ class User(Base, Email):
         'CRYPT': 'des_crypt',
     }

-    def _get_password_context(self):
+    @classmethod
+    def get_password_context(cls):
+        """ Create password context for hashing and verification
+        """
         return passlib.context.CryptContext(
-            schemes=self.scheme_dict.values(),
-            default=self.scheme_dict[app.config['PASSWORD_SCHEME']],
+            schemes=cls.scheme_dict.values(),
+            default=cls.scheme_dict[app.config['PASSWORD_SCHEME']],
         )

     def check_password(self, plain):
         """ Check password against stored hash
            Update hash when default scheme has changed
         """
-        context = self._get_password_context()
+        context = self.get_password_context()
         hashed = re.match('^({[^}]+})?(.*)$', self.password).group(2)
         result = context.verify(plain, hashed)
         if result and context.identify(hashed) != context.default_scheme():
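Note: get_password_context is now a classmethod, so the password machinery can be used without a
User instance (the new PasswordField in schemas.py relies on that). It is a thin wrapper around
passlib; a rough standalone equivalent with hard-coded schemes:

    import passlib.context

    ctx = passlib.context.CryptContext(
        schemes=['pbkdf2_sha256', 'sha512_crypt'],   # Mailu takes these from User.scheme_dict
        default='pbkdf2_sha256',
    )
    hashed = ctx.hash('correct horse battery staple')
    assert ctx.verify('correct horse battery staple', hashed)
    assert ctx.identify(hashed) == 'pbkdf2_sha256'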
@@ -490,8 +520,6 @@ class User(Base, Email):
             db.session.commit()
         return result

-    # TODO: remove kwarg hash_scheme - there is no point in setting a scheme,
-    # when the next check updates the password to the default scheme.
     def set_password(self, new, hash_scheme=None, raw=False):
         """ Set password for user with specified encryption scheme
            @new: plain text password to encrypt (or, if raw is True: the hash itself)

@@ -500,7 +528,7 @@ class User(Base, Email):
         if hash_scheme is None:
             hash_scheme = app.config['PASSWORD_SCHEME']
         if not raw:
-            new = self._get_password_context().encrypt(new, self.scheme_dict[hash_scheme])
+            new = self.get_password_context().encrypt(new, self.scheme_dict[hash_scheme])
         self.password = f'{{{hash_scheme}}}{new}'

     def get_managed_domains(self):

@@ -593,7 +621,7 @@ class Alias(Base, Email):

         return None

-# TODO: what about API tokens?
+
 class Token(Base):
     """ A token is an application password for a given user.
     """

@@ -606,20 +634,19 @@ class Token(Base):
     user = db.relationship(User,
         backref=db.backref('tokens', cascade='all, delete-orphan'))
     password = db.Column(db.String(255), nullable=False)
-    # TODO: String(80) is too large?
+    # TODO: String(255) is too large? (43 should be sufficient)
     ip = db.Column(db.String(255))

     def check_password(self, password):
         """ verifies password against stored hash """
         return passlib.hash.sha256_crypt.verify(password, self.password)

-    # TODO: use crypt context and default scheme from config?
     def set_password(self, password):
         """ sets password using sha256_crypt(rounds=1000) """
         self.password = passlib.hash.sha256_crypt.using(rounds=1000).hash(password)

-    def __str__(self):
-        return str(self.comment or self.ip)
+    def __repr__(self):
+        return f'<Token #{self.id}: {self.comment or self.ip or self.password}>'


 class Fetch(Base):

@@ -644,8 +671,11 @@ class Fetch(Base):
     last_check = db.Column(db.DateTime, nullable=True)
     error = db.Column(db.String(1023), nullable=True)

-    def __str__(self):
-        return f'{self.protocol}{"s" if self.tls else ""}://{self.username}@{self.host}:{self.port}'
+    def __repr__(self):
+        return (
+            f'<Fetch #{self.id}: {self.protocol}{"s" if self.tls else ""}:'
+            f'//{self.username}@{self.host}:{self.port}>'
+        )


 class MailuConfig:

@@ -661,7 +691,7 @@ class MailuConfig:
         def __init__(self, model : db.Model):
             self.model = model

-        def __str__(self):
+        def __repr__(self):
             return f'<{self.model.__name__}-Collection>'

         @cached_property

@@ -837,8 +867,8 @@ class MailuConfig:
             if models is None or model in models:
                 db.session.query(model).delete()

-    domains = MailuCollection(Domain)
-    relays = MailuCollection(Relay)
-    users = MailuCollection(User)
-    aliases = MailuCollection(Alias)
+    domain = MailuCollection(Domain)
+    user = MailuCollection(User)
+    alias = MailuCollection(Alias)
+    relay = MailuCollection(Relay)
     config = MailuCollection(Config)
@@ -1,27 +1,66 @@
 """ Mailu marshmallow fields and schema
 """

-import re
+from copy import deepcopy

 from collections import OrderedDict
 from textwrap import wrap

+import re
+import json
 import yaml

+import sqlalchemy
+
 from marshmallow import pre_load, post_load, post_dump, fields, Schema
+from marshmallow.utils import ensure_text_type
 from marshmallow.exceptions import ValidationError
 from marshmallow_sqlalchemy import SQLAlchemyAutoSchemaOpts

 from flask_marshmallow import Marshmallow

 from OpenSSL import crypto

+try:
+    from pygments import highlight
+    from pygments.token import Token
+    from pygments.lexers import get_lexer_by_name
+    from pygments.lexers.data import YamlLexer
+    from pygments.formatters import get_formatter_by_name
+except ModuleNotFoundError:
+    COLOR_SUPPORTED = False
+else:
+    COLOR_SUPPORTED = True
+
 from . import models, dkim


 ma = Marshmallow()

-# TODO: how and where to mark keys as "required" while unserializing (on commandline, in api)?
-# - fields without default => required
-# - fields which are the primary key => unchangeable when updating
+# TODO: how and where to mark keys as "required" while unserializing in api?
+# - when modifying, nothing is required (only the primary key, but this key is in the uri)
+# - the primary key from post data must not differ from the key in the uri
+# - when creating all fields without default or auto-increment are required
+# TODO: what about deleting list items and prung lists?
+# - domain.alternatives, user.forward_destination, user.manager_of, aliases.destination
+# TODO: validate everything!
+
+
+### class for hidden values ###
+
+class _Hidden:
+    def __bool__(self):
+        return False
+    def __copy__(self):
+        return self
+    def __deepcopy__(self, _):
+        return self
+    def __eq__(self, other):
+        return str(other) == '<hidden>'
+    def __repr__(self):
+        return '<hidden>'
+    __str__ = __repr__
+
+HIDDEN = _Hidden()
+

 ### map model to schema ###
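Note: HIDDEN is a falsy sentinel that compares equal to the literal string '<hidden>' and survives
copy/deepcopy unchanged, so it can replace secrets in dumped data without breaking later
comparisons. Behaviour sketch:

    from copy import copy, deepcopy

    assert not HIDDEN                    # falsy
    assert HIDDEN == '<hidden>'          # compares equal to its string form
    assert str(HIDDEN) == '<hidden>'
    assert copy(HIDDEN) is HIDDEN and deepcopy(HIDDEN) is HIDDEN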
@@ -41,13 +80,90 @@ def mapped(cls):
     return cls


-### yaml render module ###
+### helper functions ###
+
+def get_fieldspec(exc):
+    """ walk traceback to extract spec of invalid field from marshmallow """
+    path = []
+    tbck = exc.__traceback__
+    while tbck:
+        if tbck.tb_frame.f_code.co_name == '_serialize':
+            if 'attr' in tbck.tb_frame.f_locals:
+                path.append(tbck.tb_frame.f_locals['attr'])
+        elif tbck.tb_frame.f_code.co_name == '_init_fields':
+            path = '.'.join(path)
+            spec = ', '.join([f'{path}.{key}' for key in tbck.tb_frame.f_locals['invalid_fields']])
+            return spec
+        tbck = tbck.tb_next
+    return None
+
+def colorize(data, lexer='yaml', formatter='terminal', color=None, strip=False):
+    """ add ANSI color to data """
+    if color is None:
+        # autodetect colorize
+        color = COLOR_SUPPORTED
+    if not color:
+        # no color wanted
+        return data
+    if not COLOR_SUPPORTED:
+        # want color, but not supported
+        raise ValueError('Please install pygments to colorize output')
+
+    scheme = {
+        Token: ('', ''),
+        Token.Name.Tag: ('cyan', 'brightcyan'),
+        Token.Literal.Scalar: ('green', 'green'),
+        Token.Literal.String: ('green', 'green'),
+        Token.Keyword.Constant: ('magenta', 'brightmagenta'),
+        Token.Literal.Number: ('magenta', 'brightmagenta'),
+        Token.Error: ('red', 'brightred'),
+        Token.Name: ('red', 'brightred'),
+        Token.Operator: ('red', 'brightred'),
+    }
+
+    class MyYamlLexer(YamlLexer):
+        """ colorize yaml constants and integers """
+        def get_tokens(self, text, unfiltered=False):
+            for typ, value in super().get_tokens(text, unfiltered):
+                if typ is Token.Literal.Scalar.Plain:
+                    if value in {'true', 'false', 'null'}:
+                        typ = Token.Keyword.Constant
+                    elif value == HIDDEN:
+                        typ = Token.Error
+                    else:
+                        try:
+                            int(value, 10)
+                        except ValueError:
+                            try:
+                                float(value)
+                            except ValueError:
+                                pass
+                            else:
+                                typ = Token.Literal.Number.Float
+                        else:
+                            typ = Token.Literal.Number.Integer
+                yield typ, value
+
+    res = highlight(
+        data,
+        MyYamlLexer() if lexer == 'yaml' else get_lexer_by_name(lexer),
+        get_formatter_by_name(formatter, colorscheme=scheme)
+    )
+
+    return res.rstrip('\n') if strip else res
+
+
+### render modules ###

 # allow yaml module to dump OrderedDict
 yaml.add_representer(
     OrderedDict,
     lambda cls, data: cls.represent_mapping('tag:yaml.org,2002:map', data.items())
 )
+yaml.add_representer(
+    _Hidden,
+    lambda cls, data: cls.represent_data(str(data))
+)

 class RenderYAML:
     """ Marshmallow YAML Render Module
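Note: colorize() is a thin wrapper around pygments: for YAML it uses the MyYamlLexer defined above
(so plain scalars that look like numbers or true/false/null get their own token types), otherwise
it falls back to get_lexer_by_name(). Stripped of the custom lexer and color scheme, the call it
makes boils down to:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import get_formatter_by_name

    text = 'name: example.com\ndkim_key: <hidden>\n'
    print(highlight(text, get_lexer_by_name('yaml'), get_formatter_by_name('terminal')))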
@@ -67,19 +183,19 @@ class RenderYAML:
             return super().increase_indent(flow, False)

     @staticmethod
-    def _update_items(dict1, dict2):
-        """ sets missing keys in dict1 to values of dict2
+    def _augment(kwargs, defaults):
+        """ add default kv's to kwargs if missing
         """
-        for key, value in dict2.items():
-            if key not in dict1:
-                dict1[key] = value
+        for key, value in defaults.items():
+            if key not in kwargs:
+                kwargs[key] = value

     _load_defaults = {}
     @classmethod
     def loads(cls, *args, **kwargs):
         """ load yaml data from string
         """
-        cls._update_items(kwargs, cls._load_defaults)
+        cls._augment(kwargs, cls._load_defaults)
         return yaml.safe_load(*args, **kwargs)

     _dump_defaults = {

@@ -90,13 +206,52 @@ class RenderYAML:
     }
     @classmethod
     def dumps(cls, *args, **kwargs):
-        """ dump yaml data to string
+        """ dump data to yaml string
         """
-        cls._update_items(kwargs, cls._dump_defaults)
+        cls._augment(kwargs, cls._dump_defaults)
         return yaml.dump(*args, **kwargs)

+class JSONEncoder(json.JSONEncoder):
+    """ JSONEncoder supporting serialization of HIDDEN """
+    def default(self, o):
+        """ serialize HIDDEN """
+        if isinstance(o, _Hidden):
+            return str(o)
+        return json.JSONEncoder.default(self, o)

-### field definitions ###
+class RenderJSON:
+    """ Marshmallow JSON Render Module
+    """
+
+    @staticmethod
+    def _augment(kwargs, defaults):
+        """ add default kv's to kwargs if missing
+        """
+        for key, value in defaults.items():
+            if key not in kwargs:
+                kwargs[key] = value
+
+    _load_defaults = {}
+    @classmethod
+    def loads(cls, *args, **kwargs):
+        """ load json data from string
+        """
+        cls._augment(kwargs, cls._load_defaults)
+        return json.loads(*args, **kwargs)
+
+    _dump_defaults = {
+        'separators': (',',':'),
+        'cls': JSONEncoder,
+    }
+    @classmethod
+    def dumps(cls, *args, **kwargs):
+        """ dump data to json string
+        """
+        cls._augment(kwargs, cls._dump_defaults)
+        return json.dumps(*args, **kwargs)
+
+
+### custom fields ###

 class LazyStringField(fields.String):
     """ Field that serializes a "false" value to the empty string
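Note: RenderJSON plugs the HIDDEN-aware JSONEncoder into json.dumps, so masked secrets serialize as
the string "<hidden>" instead of raising a TypeError. Sketch with example data:

    data = {'email': 'admin@example.com', 'password': HIDDEN}
    print(json.dumps(data, cls=JSONEncoder, separators=(',', ':')))
    # {"email":"admin@example.com","password":"<hidden>"}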
@@ -107,9 +262,8 @@ class LazyStringField(fields.String):
         """
         return value if value else ''

-
 class CommaSeparatedListField(fields.Raw):
-    """ Field that deserializes a string containing comma-separated values to
+    """ Deserialize a string containing comma-separated values to
         a list of strings
     """

@@ -129,10 +283,15 @@ class CommaSeparatedListField(fields.Raw):


 class DkimKeyField(fields.String):
-    """ Field that serializes a dkim key to a list of strings (lines) and
-        deserializes a string or list of strings.
+    """ Serialize a dkim key to a list of strings (lines) and
+        Deserialize a string or list of strings to a valid dkim key
     """

+    default_error_messages = {
+        "invalid": "Not a valid string or list.",
+        "invalid_utf8": "Not a valid utf-8 string or list.",
+    }
+
     _clean_re = re.compile(
         r'(^-----BEGIN (RSA )?PRIVATE KEY-----|-----END (RSA )?PRIVATE KEY-----$|\s+)',
         flags=re.UNICODE

@@ -156,11 +315,19 @@ class DkimKeyField(fields.String):

         # convert list to str
         if isinstance(value, list):
-            value = ''.join(value)
+            try:
+                value = ''.join([ensure_text_type(item) for item in value])
+            except UnicodeDecodeError as exc:
+                raise self.make_error("invalid_utf8") from exc

-        # only strings are allowed
-        if not isinstance(value, str):
-            raise ValidationError(f'invalid type {type(value).__name__!r}')
+        # only text is allowed
+        else:
+            if not isinstance(value, (str, bytes)):
+                raise self.make_error("invalid")
+            try:
+                value = ensure_text_type(value)
+            except UnicodeDecodeError as exc:
+                raise self.make_error("invalid_utf8") from exc

         # clean value (remove whitespace and header/footer)
         value = self._clean_re.sub('', value.strip())

@@ -189,28 +356,53 @@
         else:
             return value

-
-### base definitions ###
-
-def handle_email(data):
-    """ merge separate localpart and domain to email
-    """
-
-    localpart = 'localpart' in data
-    domain = 'domain' in data
-
-    if 'email' in data:
-        if localpart or domain:
-            raise ValidationError('duplicate email and localpart/domain')
-        data['localpart'], data['domain_name'] = data['email'].rsplit('@', 1)
-    elif localpart and domain:
-        data['domain_name'] = data['domain']
-        del data['domain']
-        data['email'] = f'{data["localpart"]}@{data["domain_name"]}'
-    elif localpart or domain:
-        raise ValidationError('incomplete localpart/domain')
-
-    return data
+class PasswordField(fields.Str):
+    """ Serialize a hashed password hash by stripping the obsolete {SCHEME}
+        Deserialize a plain password or hashed password into a hashed password
+    """
+
+    _hashes = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'}
+
+    def _serialize(self, value, attr, obj, **kwargs):
+        """ strip obsolete {password-hash} when serializing """
+        # strip scheme spec if in database - it's obsolete
+        if value.startswith('{') and (end := value.find('}', 1)) >= 0:
+            if value[1:end] in self._hashes:
+                return value[end+1:]
+        return value
+
+    def _deserialize(self, value, attr, data, **kwargs):
+        """ hashes plain password or checks hashed password
+            also strips obsolete {password-hash} when deserializing
+        """
+
+        # when hashing is requested: use model instance to hash plain password
+        if data.get('hash_password'):
+            # hash password using model instance
+            inst = self.metadata['model']()
+            inst.set_password(value)
+            value = inst.password
+            del inst
+
+        # strip scheme spec when specified - it's obsolete
+        if value.startswith('{') and (end := value.find('}', 1)) >= 0:
+            if value[1:end] in self._hashes:
+                value = value[end+1:]
+
+        # check if algorithm is supported
+        inst = self.metadata['model'](password=value)
+        try:
+            # just check against empty string to see if hash is valid
+            inst.check_password('')
+        except ValueError as exc:
+            # ValueError: hash could not be identified
+            raise ValidationError(f'invalid password hash {value!r}') from exc
+        del inst
+
+        return value
+
+
+### base schema ###

 class BaseOpts(SQLAlchemyAutoSchemaOpts):
     """ Option class with sqla session
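Note: PasswordField only hashes a plain password when the sibling 'hash_password' flag is set in
the input; otherwise it treats the value as an already-hashed password and merely strips an
obsolete '{SCHEME}' prefix. The prefix-stripping logic in isolation (helper name is made up):

    _hashes = {'PBKDF2', 'BLF-CRYPT', 'SHA512-CRYPT', 'SHA256-CRYPT', 'MD5-CRYPT', 'CRYPT'}

    def strip_scheme(value):
        """ drop an obsolete {SCHEME} prefix from a stored password hash """
        if value.startswith('{') and (end := value.find('}', 1)) >= 0:
            if value[1:end] in _hashes:
                return value[end+1:]
        return value

    assert strip_scheme('{SHA512-CRYPT}$6$salt$hash') == '$6$salt$hash'
    assert strip_scheme('{UNKNOWN}value') == '{UNKNOWN}value'   # unknown schemes are kept as-is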
@@ -220,6 +412,8 @@ class BaseOpts(SQLAlchemyAutoSchemaOpts):
         meta.sqla_session = models.db.session
         if not hasattr(meta, 'ordered'):
             meta.ordered = True
+        if not hasattr(meta, 'sibling'):
+            meta.sibling = False
         super(BaseOpts, self).__init__(meta, ordered=ordered)

 class BaseSchema(ma.SQLAlchemyAutoSchema):

@@ -231,10 +425,15 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):

     class Meta:
         """ Schema config """
+        include_by_context = {}
+        exclude_by_value = {}
+        hide_by_context = {}
+        order = []
+        sibling = False

     def __init__(self, *args, **kwargs):

-        # context?
+        # get context
         context = kwargs.get('context', {})
         flags = {key for key, value in context.items() if value is True}

@@ -261,7 +460,7 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):

         # exclude default values
         if not context.get('full'):
-            for column in getattr(self.opts, 'model').__table__.columns:
+            for column in self.opts.model.__table__.columns:
                 if column.name not in exclude:
                     self._exclude_by_value.setdefault(column.name, []).append(
                         None if column.default is None else column.default.arg

@@ -274,45 +473,239 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
            if not flags & set(need):
                self._hide_by_context |= set(what)

+        # remember primary keys
+        self._primary = self.opts.model.__table__.primary_key.columns.values()[0].name
+
         # initialize attribute order
         if hasattr(self.Meta, 'order'):
             # use user-defined order
-            self._order = list(reversed(getattr(self.Meta, 'order')))
+            self._order = list(reversed(self.Meta.order))
         else:
             # default order is: primary_key + other keys alphabetically
             self._order = list(sorted(self.fields.keys()))
-            primary = self.opts.model.__table__.primary_key.columns.values()[0].name
-            if primary in self._order:
-                self._order.remove(primary)
+            if self._primary in self._order:
+                self._order.remove(self._primary)
                 self._order.reverse()
-                self._order.append(primary)
+                self._order.append(self._primary)

         # move pre_load hook "_track_import" to the front
         hooks = self._hooks[('pre_load', False)]
-        if '_track_import' in hooks:
-            hooks.remove('_track_import')
-            hooks.insert(0, '_track_import')
-        # and post_load hook "_fooo" to the end
+        hooks.remove('_track_import')
+        hooks.insert(0, '_track_import')
+        # move pre_load hook "_add_instance" to the end
+        hooks.remove('_add_required')
+        hooks.append('_add_required')
+
+        # move post_load hook "_add_instance" to the end
         hooks = self._hooks[('post_load', False)]
-        if '_add_instance' in hooks:
-            hooks.remove('_add_instance')
-            hooks.append('_add_instance')
+        hooks.remove('_add_instance')
+        hooks.append('_add_instance')

+    def hide(self, data):
+        """ helper method to hide input data for logging """
+        # always returns a copy of data
+        return {
+            key: HIDDEN if key in self._hide_by_context else deepcopy(value)
+            for key, value in data.items()
+        }
+
+    def _call_and_store(self, *args, **kwargs):
+        """ track curent parent field for pruning """
+        self.context['parent_field'] = kwargs['field_name']
+        return super()._call_and_store(*args, **kwargs)
+
+    # this is only needed to work around the declared attr "email" primary key in model
+    def get_instance(self, data):
+        """ lookup item by defined primary key instead of key(s) from model """
+        if self.transient:
+            return None
+        if keys := getattr(self.Meta, 'primary_keys', None):
+            filters = {key: data.get(key) for key in keys}
+            if None not in filters.values():
+                return self.session.query(self.opts.model).filter_by(**filters).first()
+        return super().get_instance(data)
+
+    @pre_load(pass_many=True)
+    def _patch_input(self, items, many, **kwargs): # pylint: disable=unused-argument
+        """ - flush sqla session before serializing a section when requested
+              (make sure all objects that could be referred to later are created)
+            - when in update mode: patch input data before deserialization
+              - handle "prune" and "delete" items
+              - replace values in keys starting with '-' with default
+        """
+
+        # flush sqla session
+        if not self.Meta.sibling:
+            self.opts.sqla_session.flush()
+
+        # stop early when not updating
+        if not self.context.get('update'):
+            return items
+
+        # patch "delete", "prune" and "default"
+        want_prune = []
+        def patch(count, data, prune):
+
+            # don't allow __delete__ coming from input
+            if '__delete__' in data:
+                raise ValidationError('Unknown field.', f'{count}.__delete__')
+
+            # handle "prune list" and "delete item" (-pkey: none and -pkey: id)
+            for key in data:
+                if key.startswith('-'):
+                    if key[1:] == self._primary:
+                        # delete or prune
+                        if data[key] is None:
+                            # prune
+                            prune.append(True)
+                            return None
+                        # mark item for deletion
+                        return {key[1:]: data[key], '__delete__': True}
+
+            # handle "set to default value" (-key: none)
+            def set_default(key, value):
+                if not key.startswith('-'):
+                    return (key, value)
+                key = key[1:]
+                if not key in self.opts.model.__table__.columns:
+                    return (key, None)
+                if value is not None:
+                    raise ValidationError(
+                        'When resetting to default value must be null.',
+                        f'{count}.{key}'
+                    )
+                value = self.opts.model.__table__.columns[key].default
+                if value is None:
+                    raise ValidationError(
+                        'Field has no default value.',
+                        f'{count}.{key}'
+                    )
+                return (key, value.arg)
+
+            return dict([set_default(key, value) for key, value in data.items()])
+
+        # convert items to "delete" and filter "prune" item
+        items = [
+            item for item in [
+                patch(count, item, want_prune) for count, item in enumerate(items)
+            ] if item
+        ]
+
+        # prune: determine if existing items in db need to be added or marked for deletion
+        add_items = False
+        del_items = False
+        if self.Meta.sibling:
+            # parent prunes automatically
+            if not want_prune:
+                # no prune requested => add old items
+                add_items = True
+        else:
+            # parent does not prune automatically
+            if want_prune:
+                # prune requested => mark old items for deletion
+                del_items = True
+
+        if add_items or del_items:
+            existing = {item[self._primary] for item in items if self._primary in item}
+            for item in getattr(self.context['parent'], self.context['parent_field']):
+                key = getattr(item, self._primary)
+                if key not in existing:
+                    if add_items:
+                        items.append({self._primary: key})
+                    else:
+                        items.append({self._primary: key, '__delete__': True})
+
+        return items
||||||
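
    # illustration (added note, hypothetical input): in update mode _patch_input turns
    #   {'-email': 'foo@example.com'}  into  {'email': 'foo@example.com', '__delete__': True}
    #   {'-email': None}               into  a prune request for the surrounding list
    #   {'-enable_imap': None}         into  {'enable_imap': <column default>}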

    @pre_load
    def _track_import(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ call callback function to track import """

        # callback
        if callback := self.context.get('callback'):
            callback(self, data)

        return data
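
    # illustration (added note): the callback is called as ``callback(schema, data)`` for
    # every incoming item; it is presumably installed via the schema context by the
    # config-import command to report progress at the chosen verbosity.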

    @pre_load
    def _add_required(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ when updating:
            allow modification of existing items having required attributes
            by loading existing value from db
        """

        if not self.opts.load_instance or not self.context.get('update'):
            return data

        # stabilize import of auto-increment primary keys (not required),
        # by matching import data to existing items and setting primary key
        if self._primary not in data:
            for item in getattr(self.context['parent'], self.context['parent_field']):
                existing = self.dump(item, many=False)
                this = existing.pop(self._primary)
                if data == existing:
                    instance = item
                    data[self._primary] = this
                    break

        # try to load instance
        instance = self.instance or self.get_instance(data)
        if instance is None:

            if '__delete__' in data:
                # deletion of non-existent item requested
                raise ValidationError(
                    f'item not found: {data[self._primary]!r}',
                    field_name=f'?.{self._primary}',
                )

        else:

            if self.context.get('update'):
                # remember instance as parent for pruning siblings
                if not self.Meta.sibling:
                    self.context['parent'] = instance
                # delete instance when marked
                if '__delete__' in data:
                    self.opts.sqla_session.delete(instance)

            # add attributes required for validation from db
            # TODO: this will cause validation errors if value from database does not validate
            for attr_name, field_obj in self.load_fields.items():
                if field_obj.required and attr_name not in data:
                    data[attr_name] = getattr(instance, attr_name)

        return data
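
    # illustration (added note): without an explicit id in the input, a fetch or token row
    # is matched against the already stored rows by comparing all other dumped attributes,
    # so re-importing an unchanged export does not create duplicate rows.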

    @post_load(pass_original=True)
    def _add_instance(self, item, original, many, **kwargs): # pylint: disable=unused-argument
        """ add new instances to sqla session """

        if item in self.opts.sqla_session:
            # item was modified
            if 'hash_password' in original:
                # stabilize import of passwords to be hashed,
                # by not re-hashing an unchanged password
                if attr := getattr(sqlalchemy.inspect(item).attrs, 'password', None):
                    if attr.history.has_changes() and attr.history.deleted:
                        try:
                            # reset password hash, if password was not changed
                            inst = type(item)(password=attr.history.deleted[-1])
                            if inst.check_password(original['password']):
                                item.password = inst.password
                        except ValueError:
                            # hash in db is invalid
                            pass
                        else:
                            del inst
        else:
            # new item
            self.opts.sqla_session.add(item)

        return item
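
    # illustration (added note): when an item is imported twice with
    # ``password: 'secret'`` and ``hash_password: true``, the second run notices that the
    # plaintext still matches the stored hash and keeps the original hash instead of
    # generating a new one.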

    @post_dump
    def _hide_and_order(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ hide secrets and order output """

        # order output
        for key in self._order:
@@ -325,15 +718,18 @@ class BaseSchema(ma.SQLAlchemyAutoSchema):
        if not self._exclude_by_value and not self._hide_by_context:
            return data

        # exclude or hide values
        full = self.context.get('full')
        return type(data)([
            (key, HIDDEN if key in self._hide_by_context else value)
            for key, value in data.items()
            if full or key not in self._exclude_by_value or value not in self._exclude_by_value[key]
        ])

    # this field is used to mark items for deletion
    mark_delete = fields.Boolean(data_key='__delete__', load_only=True)

    # TODO: remove LazyStringField (when model was changed - IMHO comment should not be nullable)
    comment = LazyStringField()

@@ -381,6 +777,11 @@ class TokenSchema(BaseSchema):
        model = models.Token
        load_instance = True

        sibling = True

    password = PasswordField(required=True, metadata={'model': models.User})
    hash_password = fields.Boolean(load_only=True, missing=False)


@mapped
class FetchSchema(BaseSchema):
@@ -389,6 +790,8 @@ class FetchSchema(BaseSchema):
        """ Schema config """
        model = models.Fetch
        load_instance = True

        sibling = True

        include_by_context = {
            ('full', 'import'): {'last_check', 'error'},
        }
@@ -405,52 +808,25 @@ class UserSchema(BaseSchema):
        model = models.User
        load_instance = True
        include_relationships = True
        exclude = ['_email', 'domain', 'localpart', 'domain_name', 'quota_bytes_used']

        primary_keys = ['email']
        exclude_by_value = {
            'forward_destination': [[]],
            'tokens': [[]],
            'fetches': [[]],
            'manager_of': [[]],
            'reply_enddate': ['2999-12-31'],
            'reply_startdate': ['1900-01-01'],
        }
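
    # illustration (added note): attributes listed in ``exclude_by_value`` are omitted from
    # dumps when they hold the given value, e.g. a user without fetches emits no ``fetches``
    # key in ``config-export`` output unless ``--full`` is requested.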

    email = fields.String(required=True)
    tokens = fields.Nested(TokenSchema, many=True)
    fetches = fields.Nested(FetchSchema, many=True)

    password = PasswordField(required=True, metadata={'model': models.User})
    hash_password = fields.Boolean(load_only=True, missing=False)


@mapped
class AliasSchema(BaseSchema):
@@ -459,18 +835,14 @@ class AliasSchema(BaseSchema):
""" Schema config """
|
""" Schema config """
|
||||||
model = models.Alias
|
model = models.Alias
|
||||||
load_instance = True
|
load_instance = True
|
||||||
exclude = ['domain']
|
exclude = ['_email', 'domain', 'localpart', 'domain_name']
|
||||||
|
|
||||||
|
primary_keys = ['email']
|
||||||
exclude_by_value = {
|
exclude_by_value = {
|
||||||
'destination': [[]],
|
'destination': [[]],
|
||||||
}
|
}
|
||||||
|
|
||||||

    email = fields.String(required=True)

    destination = CommaSeparatedListField()
@@ -499,7 +871,7 @@ class MailuSchema(Schema):
        render_module = RenderYAML

        ordered = True
        order = ['domain', 'user', 'alias', 'relay'] # 'config'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -511,6 +883,14 @@ class MailuSchema(Schema):
            except KeyError:
                pass

    def _call_and_store(self, *args, **kwargs):
        """ track current parent and field for pruning """
        self.context.update({
            'parent': self.context.get('config'),
            'parent_field': kwargs['field_name'],
        })
        return super()._call_and_store(*args, **kwargs)

    @pre_load
    def _clear_config(self, data, many, **kwargs): # pylint: disable=unused-argument
        """ create config object in context if missing
@@ -534,8 +914,8 @@ class MailuSchema(Schema):

        return config

    domain = fields.Nested(DomainSchema, many=True)
    user = fields.Nested(UserSchema, many=True)
    alias = fields.Nested(AliasSchema, many=True)
    relay = fields.Nested(RelaySchema, many=True)
    # config = fields.Nested(ConfigSchema, many=True)
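
    # illustration (added note): these nested fields define the top-level keys accepted and
    # emitted by config-import/config-export, i.e. ``domain:``, ``user:``, ``alias:`` and
    # ``relay:`` (``config:`` is commented out above and currently unused).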

docs/cli.rst
@@ -10,8 +10,9 @@ Managing users and aliases can be done from CLI using commands:
* user
* user-import
* user-delete
* config-update
* config-export
* config-import

alias
-----
@@ -69,104 +70,160 @@ user-delete

  docker-compose exec admin flask mailu user-delete foo@example.net

config-update
-------------

The sole purpose of this command is to import users/aliases in bulk and synchronize DB entries with an external YAML template:

.. code-block:: bash

  cat mail-config.yml | docker-compose exec -T admin flask mailu config-update --delete-objects

where mail-config.yml looks like:

.. code-block:: yaml

  users:
    - localpart: foo
      domain: example.com
      password_hash: klkjhumnzxcjkajahsdqweqqwr
      hash_scheme: MD5-CRYPT

  aliases:
    - localpart: alias1
      domain: example.com
      destination: "user1@example.com,user2@example.com"

Without the ``--delete-objects`` option config-update will only add/update new values but will *not* remove any entries missing from the provided YAML input.
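
A ``password_hash``/``hash_scheme`` pair can be generated outside of Mailu; a minimal sketch using
passlib (scheme names follow Dovecot conventions - check which schemes your Mailu version accepts):

.. code-block:: python

  from passlib.hash import sha512_crypt

  # paste the output into password_hash and set hash_scheme: SHA512-CRYPT
  print(sha512_crypt.hash('changeme'))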

Users
-----

The following additional parameters can be defined for users:

* comment
* quota_bytes
* global_admin
* enable_imap
* enable_pop
* forward_enabled
* forward_destination
* reply_enabled
* reply_subject
* reply_body
* displayed_name
* spam_enabled
* spam_threshold

Alias
-----

Additional fields:

* wildcard

config-export
-------------

The purpose of this command is to export domain-, relay-, alias- and user-configuration in YAML or JSON format.

.. code-block:: bash

  # docker-compose exec admin flask mailu config-export --help
  Usage: flask mailu config-export [OPTIONS] [FILTER]...

    Export configuration as YAML or JSON to stdout or file

  Options:
    -f, --full                  Include attributes with default value.
    -s, --secrets               Include secret attributes (dkim-key, passwords).
    -c, --color                 Force colorized output.
    -d, --dns                   Include dns records.
    -o, --output-file FILENAME  Save configuration to file.
    -j, --json                  Export configuration in json format.
    -?, -h, --help              Show this message and exit.

Only non-default attributes are exported. If you want to export all attributes use ``--full``.
If you want to export plain-text secrets (dkim-keys, passwords) you have to add the ``--secrets`` option.
To include dns records (mx, spf, dkim and dmarc) add the ``--dns`` option.
By default all configuration objects are exported (domain, user, alias, relay). You can specify
filters to export only some objects or attributes (try: ``user`` or ``domain.name``).

.. code-block:: bash

  docker-compose exec admin flask mailu config-export -o mail-config.yml

  docker-compose exec admin flask mailu config-export --dns domain.dns_mx domain.dns_spf
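
The JSON export is handy for scripting; a minimal sketch (file name assumed) that reads a dump
produced with ``config-export --json --output-file mail-config.json``:

.. code-block:: python

  import json

  with open('mail-config.json') as handle:
      config = json.load(handle)

  # top-level keys mirror the export sections: domain, user, alias, relay
  print([domain['name'] for domain in config.get('domain', [])])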

config-import
-------------

The purpose of this command is to import domain-, relay-, alias- and user-configuration in bulk and synchronize DB entries with an external YAML/JSON source.

.. code-block:: bash

  # docker-compose exec admin flask mailu config-import --help
  Usage: flask mailu config-import [OPTIONS] [FILENAME|-]

    Import configuration as YAML or JSON from stdin or file

  Options:
    -v, --verbose   Increase verbosity.
    -s, --secrets   Show secret attributes in messages.
    -q, --quiet     Quiet mode - only show errors.
    -c, --color     Force colorized output.
    -u, --update    Update mode - merge input with existing config.
    -n, --dry-run   Perform a trial run with no changes made.
    -?, -h, --help  Show this message and exit.

The current version of docker-compose exec does not pass stdin correctly, so you have to use docker exec instead:

.. code-block:: bash

  docker exec -i $(docker-compose ps -q admin) flask mailu config-import -nv < mail-config.yml

mail-config.yml contains the configuration and looks like this:

.. code-block:: yaml

  domain:
    - name: example.com
      alternatives:
        - alternative.example.com

  user:
    - email: foo@example.com
      password_hash: '$2b$12$...'
      hash_scheme: BLF-CRYPT

  alias:
    - email: alias1@example.com
      destination:
        - user1@example.com
        - user2@example.com

  relay:
    - name: relay.example.com
      comment: test
      smtp: mx.example.com

config-import shows the number of created/modified/deleted objects after import.
To suppress all messages except error messages use ``--quiet``.
By adding the ``--verbose`` switch (one or more times) the import gets more detailed and shows exactly which attributes changed.
In all messages plain-text secrets (dkim-keys, passwords) are hidden by default. Use ``--secrets`` to show secrets.
If you want to test what would be done when importing use ``--dry-run``.

By default config-import replaces the whole configuration. You can use ``--update`` to change the existing configuration instead.
When updating you can add new and change existing objects.
To delete an object use ``-key: value`` (to delete the domain example.com: ``-name: example.com``).
To reset an attribute to its default use ``-key: null`` (to reset enable_imap: ``-enable_imap: null``).
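
For reference, after YAML parsing these markers are plain mapping keys; a minimal sketch of what an
update-mode payload looks like to the importer (names taken from the examples above):

.. code-block:: python

  # what config-import receives after parsing an update-mode YAML document
  update_payload = {
      'domain': [
          {'-name': 'example.com'},     # delete the whole domain object
      ],
      'user': [
          {'email': 'foo@example.com',
           '-enable_imap': None},       # reset enable_imap to its default
      ],
  }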

This is a complete YAML template with all additional parameters that can be defined:

.. code-block:: yaml

  domain:
    - name: example.com
      alternatives:
        - alternative.tld
@@ -176,13 +233,8 @@ This is a complete YAML template with all additional parameters that could be defined:
      max_quota_bytes: 0
      max_users: -1
      signup_enabled: false

  user:
    - email: postmaster@example.com
      comment: ''
      displayed_name: 'Postmaster'
@@ -192,13 +244,16 @@ This is a complete YAML template with all additional parameters that could be defined:
      fetches:
        - id: 1
          comment: 'test fetch'
          error: null
          host: other.example.com
          keep: true
          last_check: '2020-12-29T17:09:48.200179'
          password: 'secret'
          hash_password: true
          port: 993
          protocol: imap
          tls: true
          username: fetch-user
      forward_destination:
        - address@remote.example.com
      forward_enabled: true
@@ -206,12 +261,13 @@ This is a complete YAML template with all additional parameters that could be defined:
      global_admin: true
      manager_of:
        - example.com
      password: '$2b$12$...'
      hash_password: true
      quota_bytes: 1000000000
      reply_body: ''
      reply_enabled: false
      reply_enddate: '2999-12-31'
      reply_startdate: '1900-01-01'
      reply_subject: ''
      spam_enabled: true
      spam_threshold: 80
@@ -219,5 +275,16 @@ This is a complete YAML template with all additional parameters that could be defined:
        - id: 1
          comment: email-client
          ip: 192.168.1.1
          password: '$5$rounds=1$...'

  alias:
    - email: email@example.com
      comment: ''
      destination:
        - address@example.com
      wildcard: false

  relay:
    - name: relay.example.com
      comment: ''
      smtp: mx.example.com
@@ -1,4 +1,4 @@
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
  - localpart: forwardinguser
    password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
@@ -10,7 +10,7 @@ EOF

python3 tests/forward_test.py

cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
  - localpart: forwardinguser
    password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"

@@ -1,4 +1,4 @@
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
aliases:
  - localpart: alltheusers
    domain: mailu.io
@@ -7,6 +7,6 @@ EOF

python3 tests/alias_test.py

cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
aliases: []
EOF

@@ -1,4 +1,4 @@
cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
  - localpart: replyuser
    password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"
@@ -11,7 +11,7 @@ EOF

python3 tests/reply_test.py

cat << EOF | docker-compose -f tests/compose/core/docker-compose.yml exec -T admin flask mailu config-update -v 1
users:
  - localpart: replyuser
    password_hash: "\$1\$F2OStvi1\$Q8hBIHkdJpJkJn/TrMIZ9/"

@@ -1 +1 @@
Add cli commands config-import and config-export