""" Mailu config storage model
"""

import os
import smtplib
import json
import time
import hmac

from datetime import date
from email.mime import text
from itertools import chain

import flask_sqlalchemy
import sqlalchemy
import passlib.context
import passlib.hash
import passlib.registry
import idna
import dns

from flask import current_app as app
from sqlalchemy.ext import declarative
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.inspection import inspect
from werkzeug.utils import cached_property

from mailu import dkim, utils


db = flask_sqlalchemy.SQLAlchemy()


class IdnaDomain(db.TypeDecorator):
    """ Stores a Unicode string in its IDNA representation (ASCII only)
    """

    impl = db.String(80)

    def process_bind_param(self, value, dialect):
        """ encode unicode domain name to punycode """
        return idna.encode(value.lower()).decode('ascii')

    def process_result_value(self, value, dialect):
        """ decode punycode domain name to unicode """
        return idna.decode(value)

    python_type = str
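
# Illustrative sketch (not executed): how IdnaDomain maps values between Python and
# the database, assuming the idna module behaves as used above. Domains are lowercased
# and stored as punycode, and decoded back to Unicode when read:
#
#   >>> IdnaDomain().process_bind_param('Bücher.example', None)
#   'xn--bcher-kva.example'
#   >>> IdnaDomain().process_result_value('xn--bcher-kva.example', None)
#   'bücher.example'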


class IdnaEmail(db.TypeDecorator):
    """ Stores a Unicode string in its IDNA representation (ASCII only)
    """

    impl = db.String(255)

    def process_bind_param(self, value, dialect):
        """ encode unicode domain part of email address to punycode """
        if '@' not in value:
            raise ValueError('invalid email address (no "@")')
        localpart, domain_name = value.lower().rsplit('@', 1)
        if '@' in localpart:
            raise ValueError('email local part must not contain "@"')
        return f'{localpart}@{idna.encode(domain_name).decode("ascii")}'

    def process_result_value(self, value, dialect):
        """ decode punycode domain part of email to unicode """
        localpart, domain_name = value.rsplit('@', 1)
        return f'{localpart}@{idna.decode(domain_name)}'

    python_type = str
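
# Illustrative sketch (not executed): only the domain part of an address is
# punycode-encoded; the whole address is lowercased on the way in.  Assuming the
# idna module behaves as used above:
#
#   >>> IdnaEmail().process_bind_param('User@Bücher.example', None)
#   'user@xn--bcher-kva.example'
#   >>> IdnaEmail().process_result_value('user@xn--bcher-kva.example', None)
#   'user@bücher.example'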


class CommaSeparatedList(db.TypeDecorator):
    """ Stores a list as a comma-separated string, compatible with Postfix.
    """

    impl = db.String

    def process_bind_param(self, value, dialect):
        """ join list of items to comma separated string """
        if not isinstance(value, (list, tuple, set)):
            raise TypeError('Must be a list of strings')
        for item in value:
            if ',' in item:
                raise ValueError('list item must not contain ","')
        return ','.join(sorted(set(value)))

    def process_result_value(self, value, dialect):
        """ split comma separated string to list """
        return list(filter(bool, (item.strip() for item in value.split(',')))) if value else []

    python_type = list
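
# Illustrative sketch (not executed): items are de-duplicated and sorted on the way
# into the database, and empty entries are dropped on the way out:
#
#   >>> CommaSeparatedList().process_bind_param(['b@example.com', 'a@example.com'], None)
#   'a@example.com,b@example.com'
#   >>> CommaSeparatedList().process_result_value('a@example.com, ,b@example.com', None)
#   ['a@example.com', 'b@example.com']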


class JSONEncoded(db.TypeDecorator):
    """ Represents an immutable structure as a json-encoded string.
    """

    impl = db.String

    def process_bind_param(self, value, dialect):
        """ encode data as json """
        return json.dumps(value) if value else None

    def process_result_value(self, value, dialect):
        """ decode json to data """
        return json.loads(value) if value else None

    python_type = str
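
# Illustrative sketch (not executed): any JSON-serialisable value round-trips through
# this column type; note that falsy values (0, '', [], {}) are stored as NULL:
#
#   >>> JSONEncoded().process_bind_param({'ui': {'theme': 'dark'}}, None)
#   '{"ui": {"theme": "dark"}}'
#   >>> JSONEncoded().process_result_value('{"ui": {"theme": "dark"}}', None)
#   {'ui': {'theme': 'dark'}}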


class Base(db.Model):
    """ Base class for all models
    """

    __abstract__ = True

    metadata = sqlalchemy.schema.MetaData(
        naming_convention={
            'fk': '%(table_name)s_%(column_0_name)s_fkey',
            'pk': '%(table_name)s_pkey'
        }
    )

    created_at = db.Column(db.Date, nullable=False, default=date.today)
    updated_at = db.Column(db.Date, nullable=True, onupdate=date.today)
    comment = db.Column(db.String(255), nullable=True, default='')

    def __str__(self):
        pkey = self.__table__.primary_key.columns.values()[0].name
        if pkey == 'email':
            # ugly hack for email declared attr. _email is not always up to date
            return str(f'{self.localpart}@{self.domain_name}')
        return str(getattr(self, pkey))

    def __repr__(self):
        return f'<{self.__class__.__name__} {str(self)!r}>'

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            pkey = self.__table__.primary_key.columns.values()[0].name
            this = getattr(self, pkey, None)
            other = getattr(other, pkey, None)
            return this is not None and other is not None and str(this) == str(other)
        else:
            return NotImplemented

    # we need hashable instances here for sqlalchemy to update collections
    # in collections.bulk_replace, but auto-incremented rows don't always have
    # a valid primary key yet - in that case we use the object's id
    __hashed = None

    def __hash__(self):
        if self.__hashed is None:
            primary = getattr(self, self.__table__.primary_key.columns.values()[0].name)
            self.__hashed = id(self) if primary is None else hash(primary)
        return self.__hashed


# Many-to-many association table for domain managers
managers = db.Table('manager', Base.metadata,
    db.Column('domain_name', IdnaDomain, db.ForeignKey('domain.name')),
    db.Column('user_email', IdnaEmail, db.ForeignKey('user.email'))
)


class Config(Base):
    """ In-database configuration values
    """

    name = db.Column(db.String(255), primary_key=True, nullable=False)
    value = db.Column(JSONEncoded)


def _save_dkim_keys(session):
    """ store DKIM keys after commit """
    for obj in session.identity_map.values():
        if isinstance(obj, Domain):
            obj.save_dkim_key()


class Domain(Base):
    """ A DNS domain that has mail addresses associated with it.
    """

    __tablename__ = 'domain'

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    managers = db.relationship('User', secondary=managers,
        backref=db.backref('manager_of'), lazy='dynamic')
    max_users = db.Column(db.Integer, nullable=False, default=-1)
    max_aliases = db.Column(db.Integer, nullable=False, default=-1)
    max_quota_bytes = db.Column(db.BigInteger, nullable=False, default=0)
    signup_enabled = db.Column(db.Boolean, nullable=False, default=False)

    _dkim_key = None
    _dkim_key_on_disk = None

    def _dkim_file(self):
        """ return filename for active DKIM key """
        return app.config['DKIM_PATH'].format(
            domain=self.name,
            selector=app.config['DKIM_SELECTOR']
        )

    def save_dkim_key(self):
        """ save changed DKIM key to disk """
        if self._dkim_key != self._dkim_key_on_disk:
            file_path = self._dkim_file()
            if self._dkim_key:
                with open(file_path, 'wb') as handle:
                    handle.write(self._dkim_key)
            elif os.path.exists(file_path):
                os.unlink(file_path)
            self._dkim_key_on_disk = self._dkim_key

    @cached_property
    def dns_mx(self):
        """ return MX record for domain """
        hostname = app.config['HOSTNAME']
        return f'{self.name}. 600 IN MX 10 {hostname}.'

    @cached_property
    def dns_spf(self):
        """ return SPF record for domain """
        hostname = app.config['HOSTNAME']
        return f'{self.name}. 600 IN TXT "v=spf1 mx a:{hostname} ~all"'

    @property
    def dns_dkim(self):
        """ return DKIM record for domain """
        if self.dkim_key:
            selector = app.config['DKIM_SELECTOR']
            txt = f'v=DKIM1; k=rsa; p={self.dkim_publickey}'
            record = ' '.join(f'"{txt[p:p+250]}"' for p in range(0, len(txt), 250))
            return f'{selector}._domainkey.{self.name}. 600 IN TXT {record}'

    @cached_property
    def dns_dmarc(self):
        """ return DMARC record for domain """
        if self.dkim_key:
            domain = app.config['DOMAIN']
            rua = app.config['DMARC_RUA']
            rua = f' rua=mailto:{rua}@{domain};' if rua else ''
            ruf = app.config['DMARC_RUF']
            ruf = f' ruf=mailto:{ruf}@{domain};' if ruf else ''
            return f'_dmarc.{self.name}. 600 IN TXT "v=DMARC1; p=reject;{rua}{ruf} adkim=s; aspf=s"'

    @cached_property
    def dns_dmarc_report(self):
        """ return DMARC report record for mailu server """
        if self.dkim_key:
            domain = app.config['DOMAIN']
            return f'{self.name}._report._dmarc.{domain}. 600 IN TXT "v=DMARC1"'

    @cached_property
    def dns_autoconfig(self):
        """ return list of auto configuration records (RFC6186) """
        hostname = app.config['HOSTNAME']
        protocols = [
            ('submission', 587),
            ('imap', 143),
            ('pop3', 110),
        ]
        if app.config['TLS_FLAVOR'] != 'notls':
            protocols.extend([
                ('imaps', 993),
                ('pop3s', 995),
            ])
        return list([
            f'_{proto}._tcp.{self.name}. 600 IN SRV 1 1 {port} {hostname}.'
            for proto, port
            in protocols
        ])
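
    # Illustrative sketch (not executed): the dns_* properties above render ready-to-use
    # zone file lines.  Assuming HOSTNAME is 'mail.example.com' and the domain is
    # 'example.com', one would expect values along these lines:
    #
    #   domain.dns_mx             -> 'example.com. 600 IN MX 10 mail.example.com.'
    #   domain.dns_spf            -> 'example.com. 600 IN TXT "v=spf1 mx a:mail.example.com ~all"'
    #   domain.dns_autoconfig[0]  -> '_submission._tcp.example.com. 600 IN SRV 1 1 587 mail.example.com.'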

    @cached_property
    def dns_tlsa(self):
        """ return TLSA record for domain when using letsencrypt """
        hostname = app.config['HOSTNAME']
        if app.config['TLS_FLAVOR'] in ('letsencrypt', 'mail-letsencrypt'):
            # current ISRG Root X1 (RSA 4096, O = Internet Security Research Group, CN = ISRG Root X1) @20210902
            return f'_25._tcp.{hostname}. 600 IN TLSA 2 1 1 0b9fa5a59eed715c26c1020c711b4f6ec42d58b0015e14337a39dad301c5afc3'

    @property
    def dkim_key(self):
        """ return private DKIM key """
        if self._dkim_key is None:
            file_path = self._dkim_file()
            if os.path.exists(file_path):
                with open(file_path, 'rb') as handle:
                    self._dkim_key = self._dkim_key_on_disk = handle.read()
            else:
                self._dkim_key = self._dkim_key_on_disk = b''
        return self._dkim_key if self._dkim_key else None

    @dkim_key.setter
    def dkim_key(self, value):
        """ set private DKIM key """
        old_key = self.dkim_key
        self._dkim_key = value if value is not None else b''
        if self._dkim_key != old_key:
            if not sqlalchemy.event.contains(db.session, 'after_commit', _save_dkim_keys):
                sqlalchemy.event.listen(db.session, 'after_commit', _save_dkim_keys)

    @property
    def dkim_publickey(self):
        """ return public part of DKIM key """
        dkim_key = self.dkim_key
        if dkim_key:
            return dkim.strip_key(dkim_key).decode('utf8')

    def generate_dkim_key(self):
        """ generate new DKIM key """
        self.dkim_key = dkim.gen_key()

    def has_email(self, localpart):
        """ checks if localpart is configured for domain """
        localpart = localpart.lower()
        for email in chain(self.users, self.aliases):
            if email.localpart == localpart:
                return True
        return False

    def check_mx(self):
        """ checks if MX record for domain points to mailu host """
        try:
            hostnames = set(app.config['HOSTNAMES'].split(','))
            return any(
                rset.exchange.to_text().rstrip('.') in hostnames
                for rset in dns.resolver.query(self.name, 'MX')
            )
        except dns.exception.DNSException:
            return False


class Alternative(Base):
    """ Alternative name for a served domain.
        The name "domain alias" was avoided to prevent some confusion.
    """

    __tablename__ = 'alternative'

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    domain_name = db.Column(IdnaDomain, db.ForeignKey(Domain.name))
    domain = db.relationship(Domain,
        backref=db.backref('alternatives', cascade='all, delete-orphan'))


class Relay(Base):
    """ Relayed mail domain.
        The domain is either relayed publicly or through a specified SMTP host.
    """

    __tablename__ = 'relay'

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    smtp = db.Column(db.String(80), nullable=True)


class Email(object):
    """ Abstraction for an email address (localpart and domain).
    """

    localpart = db.Column(db.String(80), nullable=False)

    @declarative.declared_attr
    def domain_name(cls):
        """ the domain part of the email address """
        return db.Column(IdnaDomain, db.ForeignKey(Domain.name),
            nullable=False, default=IdnaDomain)

    # This field is redundant with both localpart and domain name.
    # It is however very useful for quick lookups without joining tables,
    # especially when the mail server is reading the database.
    @declarative.declared_attr
    def _email(cls):
        """ the complete email address (localpart@domain) """

        def updater(ctx):
            key = f'{cls.__tablename__}_email'
            if key in ctx.current_parameters:
                return ctx.current_parameters[key]
            return '{localpart}@{domain_name}'.format_map(ctx.current_parameters)

        return db.Column('email', IdnaEmail, primary_key=True, nullable=False, onupdate=updater)

    # We need to keep email, localpart and domain_name in sync.
    # But IMHO using email as primary key was not a good idea in the first place.
    @hybrid_property
    def email(self):
        """ getter for email - gets _email """
        return self._email

    @email.setter
    def email(self, value):
        """ setter for email - sets _email, localpart and domain_name at once """
        self._email = value.lower()
        self.localpart, self.domain_name = self._email.rsplit('@', 1)

    @staticmethod
    def _update_localpart(target, value, *_):
        if target.domain_name:
            target._email = f'{value}@{target.domain_name}'

    @staticmethod
    def _update_domain_name(target, value, *_):
        if target.localpart:
            target._email = f'{target.localpart}@{value}'

    @classmethod
    def __declare_last__(cls):
        # gets called after mappings are completed
        sqlalchemy.event.listen(cls.localpart, 'set', cls._update_localpart, propagate=True)
        sqlalchemy.event.listen(cls.domain_name, 'set', cls._update_domain_name, propagate=True)

    def sendmail(self, subject, body):
        """ send an email to the address """
        f_addr = f'{app.config["POSTMASTER"]}@{idna.encode(app.config["DOMAIN"]).decode("ascii")}'
        with smtplib.SMTP(app.config['HOST_AUTHSMTP'], port=10025) as smtp:
            to_address = f'{self.localpart}@{idna.encode(self.domain_name).decode("ascii")}'
            msg = text.MIMEText(body)
            msg['Subject'] = subject
            msg['From'] = f_addr
            msg['To'] = to_address
            smtp.sendmail(f_addr, [to_address], msg.as_string())

    @classmethod
    def resolve_domain(cls, email):
        """ resolves domain alternative to real domain """
        localpart, domain_name = email.rsplit('@', 1) if '@' in email else (None, email)
        if alternative := Alternative.query.get(domain_name):
            domain_name = alternative.domain_name
        return (localpart, domain_name)

    @classmethod
    def resolve_destination(cls, localpart, domain_name, ignore_forward_keep=False):
        """ return destination for email address localpart@domain_name """

        localpart_stripped = None
        stripped_alias = None

        delim = os.environ.get('RECIPIENT_DELIMITER')
        # guard against an unset delimiter - 'None in str' would raise a TypeError
        if delim and delim in localpart:
            localpart_stripped = localpart.rsplit(delim, 1)[0]

        user = User.query.get(f'{localpart}@{domain_name}')
        if not user and localpart_stripped:
            user = User.query.get(f'{localpart_stripped}@{domain_name}')

        if user:
            email = f'{localpart}@{domain_name}'

            if user.forward_enabled:
                destination = user.forward_destination
                if user.forward_keep or ignore_forward_keep:
                    destination.append(email)
            else:
                destination = [email]

            return destination

        pure_alias = Alias.resolve(localpart, domain_name)

        if pure_alias and not pure_alias.wildcard:
            return pure_alias.destination

        if stripped_alias := Alias.resolve(localpart_stripped, domain_name):
            return stripped_alias.destination

        if pure_alias:
            return pure_alias.destination

        return None
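
    # Illustrative sketch (not executed): resolution order for an incoming address,
    # assuming RECIPIENT_DELIMITER is '+'.  For 'alice+news@example.com' the lookup is:
    #   1. user 'alice+news@example.com', then user 'alice@example.com'
    #   2. if a user matched: its forward destinations (plus the mailbox itself when
    #      forward_keep is set), or just the mailbox
    #   3. otherwise: exact alias, then alias for the stripped localpart, then wildcard alias
    #   4. None if nothing matched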


class User(Base, Email):
    """ A user is an email address that has a password to access a mailbox.
    """

    __tablename__ = 'user'
    _ctx = None
    _credential_cache = {}

    domain = db.relationship(Domain,
        backref=db.backref('users', cascade='all, delete-orphan'))
    password = db.Column(db.String(255), nullable=False)
    quota_bytes = db.Column(db.BigInteger, nullable=False, default=10**9)
    quota_bytes_used = db.Column(db.BigInteger, nullable=False, default=0)
    global_admin = db.Column(db.Boolean, nullable=False, default=False)
    enabled = db.Column(db.Boolean, nullable=False, default=True)

    # Features
    enable_imap = db.Column(db.Boolean, nullable=False, default=True)
    enable_pop = db.Column(db.Boolean, nullable=False, default=True)

    # Filters
    forward_enabled = db.Column(db.Boolean, nullable=False, default=False)
    forward_destination = db.Column(CommaSeparatedList, nullable=True, default=list)
    forward_keep = db.Column(db.Boolean, nullable=False, default=True)
    reply_enabled = db.Column(db.Boolean, nullable=False, default=False)
    reply_subject = db.Column(db.String(255), nullable=True, default=None)
    reply_body = db.Column(db.Text, nullable=True, default=None)
    reply_startdate = db.Column(db.Date, nullable=False,
        default=date(1900, 1, 1))
    reply_enddate = db.Column(db.Date, nullable=False,
        default=date(2999, 12, 31))

    # Settings
    displayed_name = db.Column(db.String(160), nullable=False, default='')
    spam_enabled = db.Column(db.Boolean, nullable=False, default=True)
    spam_threshold = db.Column(db.Integer, nullable=False, default=80)

    # Flask-login attributes
    is_authenticated = True
    is_active = True
    is_anonymous = False

    def get_id(self):
        """ return user's email address """
        return self.email

    @property
    def destination(self):
        """ returns comma separated string of destinations """
        if self.forward_enabled:
            result = list(self.forward_destination)
            if self.forward_keep:
                result.append(self.email)
            return ','.join(result)
        else:
            return self.email

    @property
    def reply_active(self):
        """ returns status of autoreply function """
        now = date.today()
        return (
            self.reply_enabled and
            self.reply_startdate < now and
            self.reply_enddate > now
        )

    @property
    def sender_limiter(self):
        """ returns the rate limiter for this sender """
        return utils.limiter.get_limiter(
            app.config["MESSAGE_RATELIMIT"], "sender", self.email
        )

    @classmethod
    def get_password_context(cls):
        """ create password context for hashing and verification
        """
        if cls._ctx:
            return cls._ctx

        schemes = passlib.registry.list_crypt_handlers()
        # scrypt throws a warning if the native wheels aren't found
        schemes.remove('scrypt')
        # we can't leave plaintext schemes as they will be misidentified
        # (filter into a new list - removing while iterating would skip entries)
        schemes = [scheme for scheme in schemes if not scheme.endswith('plaintext')]
        cls._ctx = passlib.context.CryptContext(
            schemes=schemes,
            default='bcrypt_sha256',
            bcrypt_sha256__rounds=app.config['CREDENTIAL_ROUNDS'],
            deprecated='auto'
        )
        return cls._ctx

    def check_password(self, password):
        """ verifies password against stored hash
            and updates hash if outdated
        """
        if password == '':
            return False
        cache_result = self._credential_cache.get(self.get_id())
        current_salt = self.password.split('$')[3] if len(self.password.split('$')) == 5 else None
        if cache_result and current_salt:
            cache_salt, cache_hash = cache_result
            if cache_salt == current_salt:
                return passlib.hash.pbkdf2_sha256.verify(password, cache_hash)
            else:
                # the cache is local per gunicorn; the password has changed
                # so the local cache can be invalidated
                del self._credential_cache[self.get_id()]
        reference = self.password
        # strip {scheme} if that's something mailu has added
        # passlib will identify *crypt based hashes just fine
        # on its own
        if reference.startswith(('{PBKDF2}', '{BLF-CRYPT}', '{SHA512-CRYPT}', '{SHA256-CRYPT}', '{MD5-CRYPT}', '{CRYPT}')):
            reference = reference.split('}', 1)[1]

        result, new_hash = User.get_password_context().verify_and_update(password, reference)
        if new_hash:
            self.password = new_hash
            db.session.add(self)
            db.session.commit()

        if result:
            """The credential cache uses a low number of rounds to be fast.
            While it's not meant to be persisted to cold-storage, no additional measures
            are taken to ensure it isn't (mlock(), encrypted swap, ...) on the basis that
            we have little control over GC and string interning anyways.

            An attacker that can dump the process' memory is likely to find credentials
            in clear-text regardless of the presence of the cache.
            """
            self._credential_cache[self.get_id()] = (self.password.split('$')[3], passlib.hash.pbkdf2_sha256.using(rounds=1).hash(password))
        return result
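
    # Illustrative sketch (not executed): the expensive bcrypt_sha256 verification only
    # runs on a cache miss.  Assuming the stored hash is a '$bcrypt-sha256$...' value,
    # repeated logins in the same process hit the cheap 1-round pbkdf2 cache entry,
    # keyed by the stored hash's salt so a password change invalidates it:
    #
    #   >>> user.check_password('hunter2')   # slow path, fills _credential_cache
    #   True
    #   >>> user.check_password('hunter2')   # fast path, pbkdf2_sha256 (rounds=1) only
    #   True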

    def set_password(self, password, raw=False):
        """ Set password for user
            @password: plain text password to encrypt (or, if raw is True: the hash itself)
        """
        self.password = password if raw else User.get_password_context().hash(password)

    def get_managed_domains(self):
        """ return list of domains this user can manage """
        if self.global_admin:
            return Domain.query.all()
        else:
            return self.manager_of

    def get_managed_emails(self, include_aliases=True):
        """ returns list of email addresses this user can manage """
        emails = []
        for domain in self.get_managed_domains():
            emails.extend(domain.users)
            if include_aliases:
                emails.extend(domain.aliases)
        return emails

    def send_welcome(self):
        """ send welcome email to user """
        if app.config['WELCOME']:
            self.sendmail(app.config['WELCOME_SUBJECT'], app.config['WELCOME_BODY'])

    @classmethod
    def get(cls, email):
        """ find user object for email address """
        return cls.query.get(email)

    @classmethod
    def login(cls, email, password):
        """ login user when enabled and password is valid """
        user = cls.query.get(email)
        return user if (user and user.enabled and user.check_password(password)) else None

    @classmethod
    def get_temp_token(cls, email):
        """ generate a temporary token for the user, valid for the current day """
        user = cls.query.get(email)
        return hmac.new(app.temp_token_key, bytearray("{}|{}".format(time.strftime('%Y%m%d'), email), 'utf-8'), 'sha256').hexdigest() if (user and user.enabled) else None

    def verify_temp_token(self, token):
        """ verify a temporary token against the user's current one """
        return hmac.compare_digest(self.get_temp_token(self.email), token)


class Alias(Base, Email):
    """ An alias is an email address that redirects to some destination.
    """

    __tablename__ = 'alias'

    domain = db.relationship(Domain,
        backref=db.backref('aliases', cascade='all, delete-orphan'))
    wildcard = db.Column(db.Boolean, nullable=False, default=False)
    destination = db.Column(CommaSeparatedList, nullable=False, default=list)

    @classmethod
    def resolve(cls, localpart, domain_name):
        """ find aliases matching email address localpart@domain_name """

        alias_preserve_case = cls.query.filter(
            sqlalchemy.and_(cls.domain_name == domain_name,
                sqlalchemy.or_(
                    sqlalchemy.and_(
                        cls.wildcard == False,
                        cls.localpart == localpart
                    ), sqlalchemy.and_(
                        cls.wildcard == True,
                        sqlalchemy.bindparam('l', localpart).like(cls.localpart)
                    )
                )
            )
        ).order_by(cls.wildcard, sqlalchemy.func.char_length(cls.localpart).desc()).first()

        localpart_lower = localpart.lower() if localpart else None
        alias_lower_case = cls.query.filter(
            sqlalchemy.and_(cls.domain_name == domain_name,
                sqlalchemy.or_(
                    sqlalchemy.and_(
                        cls.wildcard == False,
                        sqlalchemy.func.lower(cls.localpart) == localpart_lower
                    ), sqlalchemy.and_(
                        cls.wildcard == True,
                        sqlalchemy.bindparam('l', localpart_lower).like(
                            sqlalchemy.func.lower(cls.localpart))
                    )
                )
            )
        ).order_by(cls.wildcard, sqlalchemy.func.char_length(
            sqlalchemy.func.lower(cls.localpart)).desc()).first()

        if alias_preserve_case and alias_lower_case:
            return alias_lower_case if alias_preserve_case.wildcard else alias_preserve_case

        if alias_preserve_case and not alias_lower_case:
            return alias_preserve_case

        if alias_lower_case and not alias_preserve_case:
            return alias_lower_case

        return None
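
    # Illustrative sketch (not executed): precedence when several aliases could match,
    # assuming a case-sensitive collation and the aliases 'Sales@example.com' (exact)
    # and '%@example.com' (wildcard):
    #
    #   Alias.resolve('Sales', 'example.com')    -> the exact, case-preserving alias
    #   Alias.resolve('sales', 'example.com')    -> the exact alias via the case-insensitive query
    #   Alias.resolve('anything', 'example.com') -> the wildcard alias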


class Token(Base):
    """ A token is an application password for a given user.
    """

    __tablename__ = 'token'

    id = db.Column(db.Integer, primary_key=True)
    user_email = db.Column(db.String(255), db.ForeignKey(User.email),
        nullable=False)
    user = db.relationship(User,
        backref=db.backref('tokens', cascade='all, delete-orphan'))
    password = db.Column(db.String(255), nullable=False)
    ip = db.Column(db.String(255))

    def check_password(self, password):
        """ verifies password against stored hash
            and updates hash if outdated
        """
        if self.password.startswith("$5$"):
            if passlib.hash.sha256_crypt.verify(password, self.password):
                self.set_password(password)
                db.session.add(self)
                db.session.commit()
                return True
            return False
        return passlib.hash.pbkdf2_sha256.verify(password, self.password)

    def set_password(self, password):
        """ sets password using pbkdf2_sha256 (1 round) """
        # tokens have 128 bits of entropy, they are not bruteforceable
        self.password = passlib.hash.pbkdf2_sha256.using(rounds=1).hash(password)

    def __repr__(self):
        return f'<Token #{self.id}: {self.comment or self.ip or self.password}>'
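
    # Illustrative sketch (not executed): legacy tokens hashed with sha256_crypt
    # ('$5$...') are transparently re-hashed to pbkdf2_sha256 on first successful use:
    #
    #   >>> token.password.startswith('$5$')
    #   True
    #   >>> token.check_password('app-password')   # verifies, then upgrades the stored hash
    #   True
    #   >>> token.password.startswith('$pbkdf2-sha256$')
    #   True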


class Fetch(Base):
    """ A fetched account is a remote POP/IMAP account fetched into a local
        account.
    """

    __tablename__ = 'fetch'

    id = db.Column(db.Integer, primary_key=True)
    user_email = db.Column(db.String(255), db.ForeignKey(User.email),
        nullable=False)
    user = db.relationship(User,
        backref=db.backref('fetches', cascade='all, delete-orphan'))
    protocol = db.Column(db.Enum('imap', 'pop3'), nullable=False)
    host = db.Column(db.String(255), nullable=False)
    port = db.Column(db.Integer, nullable=False)
    tls = db.Column(db.Boolean, nullable=False, default=False)
    username = db.Column(db.String(255), nullable=False)
    password = db.Column(db.String(255), nullable=False)
    keep = db.Column(db.Boolean, nullable=False, default=False)
    last_check = db.Column(db.DateTime, nullable=True)
    error = db.Column(db.String(1023), nullable=True)

    def __repr__(self):
        return (
            f'<Fetch #{self.id}: {self.protocol}{"s" if self.tls else ""}:'
            f'//{self.username}@{self.host}:{self.port}>'
        )


class MailuConfig:
    """ Class which joins whole Mailu config for dumping
        and loading
    """

    class MailuCollection:
        """ Provides dict- and list-like access to instances
            of a sqlalchemy model
        """

        def __init__(self, model: db.Model):
            self.model = model

        def __repr__(self):
            return f'<{self.model.__name__}-Collection>'

        @cached_property
        def _items(self):
            return {
                inspect(item).identity: item
                for item in self.model.query.all()
            }

        def __len__(self):
            return len(self._items)

        def __iter__(self):
            return iter(self._items.values())

        def __getitem__(self, key):
            return self._items[key]

        def __setitem__(self, key, item):
            if not isinstance(item, self.model):
                raise TypeError(f'expected {self.model.name}')
            if key != inspect(item).identity:
                raise ValueError(f'item identity != key {key!r}')
            self._items[key] = item

        def __delitem__(self, key):
            del self._items[key]

        def append(self, item, update=False):
            """ list-like append """
            if not isinstance(item, self.model):
                raise TypeError(f'expected {self.model.name}')
            key = inspect(item).identity
            if key in self._items:
                if not update:
                    raise ValueError(f'item {key!r} already present in collection')
            self._items[key] = item

        def extend(self, items, update=False):
            """ list-like extend """
            add = {}
            for item in items:
                if not isinstance(item, self.model):
                    raise TypeError(f'expected {self.model.name}')
                key = inspect(item).identity
                if not update and key in self._items:
                    raise ValueError(f'item {key!r} already present in collection')
                add[key] = item
            self._items.update(add)

        def pop(self, *args):
            """ list-like (no args) and dict-like (1 or 2 args) pop """
            if args:
                if len(args) > 2:
                    raise TypeError(f'pop expected at most 2 arguments, got {len(args)}')
                return self._items.pop(*args)
            else:
                return self._items.popitem()[1]

        def popitem(self):
            """ dict-like popitem """
            return self._items.popitem()

        def remove(self, item):
            """ list-like remove """
            if not isinstance(item, self.model):
                raise TypeError(f'expected {self.model.name}')
            key = inspect(item).identity
            if not key in self._items:
                raise ValueError(f'item {key!r} not found in collection')
            del self._items[key]

        def clear(self):
            """ dict-like clear """
            while True:
                try:
                    self.pop()
                # pop() delegates to dict.popitem(), which raises KeyError when empty
                except (IndexError, KeyError):
                    break

        def update(self, items):
            """ dict-like update """
            for key, item in items:
                if not isinstance(item, self.model):
                    raise TypeError(f'expected {self.model.name}')
                if key != inspect(item).identity:
                    raise ValueError(f'item identity != key {key!r}')
            self._items.update(items)

        def setdefault(self, key, item=None):
            """ dict-like setdefault """
            if key in self._items:
                return self._items[key]
            if item is None:
                return None
            if not isinstance(item, self.model):
                raise TypeError(f'expected {self.model.name}')
            if key != inspect(item).identity:
                raise ValueError(f'item identity != key {key!r}')
            self._items[key] = item
            return item

    def __init__(self):

        # section-name -> attr
        self._sections = {
            name: getattr(self, name)
            for name in dir(self)
            if isinstance(getattr(self, name), self.MailuCollection)
        }

        # known models
        self._models = tuple(section.model for section in self._sections.values())

        # model -> attr
        self._sections.update({
            section.model: section for section in self._sections.values()
        })

    def _get_model(self, section):
        if section is None:
            return None
        model = self._sections.get(section)
        if model is None:
            raise ValueError(f'Invalid section: {section!r}')
        if isinstance(model, self.MailuCollection):
            return model.model
        return model

    def _add(self, items, section, update):

        model = self._get_model(section)
        if isinstance(items, self._models):
            items = [items]
        elif not hasattr(items, '__iter__'):
            raise ValueError(f'{items!r} is not iterable')

        for item in items:
            if model is not None and not isinstance(item, model):
                what = item.__class__.__name__.capitalize()
                raise ValueError(f'{what} can not be added to section {section!r}')
            self._sections[type(item)].append(item, update=update)

    def add(self, items, section=None):
        """ add item to config """
        self._add(items, section, update=False)

    def update(self, items, section=None):
        """ add or replace item in config """
        self._add(items, section, update=True)

    def remove(self, items, section=None):
        """ remove item from config """
        model = self._get_model(section)
        if isinstance(items, self._models):
            items = [items]
        elif not hasattr(items, '__iter__'):
            raise ValueError(f'{items!r} is not iterable')

        for item in items:
            if isinstance(item, str):
                if section is None:
                    raise ValueError(f'Cannot remove key {item!r} without section')
                del self._sections[model][item]
            elif model is not None and not isinstance(item, model):
                what = item.__class__.__name__.capitalize()
                raise ValueError(f'{what} can not be removed from section {section!r}')
            else:
                self._sections[type(item)].remove(item)

    def clear(self, models=None):
        """ remove complete configuration """
        for model in self._models:
            if models is None or model in models:
                db.session.query(model).delete()

    def check(self):
        """ check for duplicate domain names """
        dup = set()
        for fqdn in chain(
            db.session.query(Domain.name),
            db.session.query(Alternative.name),
            db.session.query(Relay.name)
        ):
            if fqdn in dup:
                raise ValueError(f'Duplicate domain name: {fqdn}')
            dup.add(fqdn)

    domain = MailuCollection(Domain)
    user = MailuCollection(User)
    alias = MailuCollection(Alias)
    relay = MailuCollection(Relay)
    config = MailuCollection(Config)
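
# Illustrative sketch (not executed): dumping and loading through MailuConfig, assuming
# an application context and an initialised database.  Each section is a MailuCollection
# keyed by the sqlalchemy identity tuple of its rows:
#
#   >>> cfg = MailuConfig()
#   >>> list(cfg.domain)                      # iterate all Domain rows
#   [<Domain 'example.com'>]
#   >>> cfg.domain[('example.com',)]          # keys are identity tuples
#   <Domain 'example.com'>
#   >>> cfg.add(Domain(name='example.org'))   # section is inferred from the item's type
#   >>> cfg.check()                           # raises ValueError on duplicate domain names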