from mailu import dkim

from sqlalchemy.ext import declarative
from datetime import datetime, date
from email.mime import text
from flask import current_app as app
from textwrap import wrap

import flask_sqlalchemy
import sqlalchemy
import re
import time
import os
import passlib.context
import passlib.hash
import glob
import smtplib
import idna
import dns.resolver
import json
import itertools


db = flask_sqlalchemy.SQLAlchemy()


class IdnaDomain(db.TypeDecorator):
    """ Stores a Unicode string in its IDNA representation (ASCII only)
    """

    impl = db.String(80)

    def process_bind_param(self, value, dialect):
        return idna.encode(value).decode('ascii').lower()

    def process_result_value(self, value, dialect):
        return idna.decode(value)

    python_type = str


class IdnaEmail(db.TypeDecorator):
    """ Stores a Unicode string in its IDNA representation (ASCII only)
    """

    impl = db.String(255)

    def process_bind_param(self, value, dialect):
        try:
            localpart, domain_name = value.split('@')
            return '{0}@{1}'.format(
                localpart,
                idna.encode(domain_name).decode('ascii'),
            ).lower()
        except ValueError:
            pass

    def process_result_value(self, value, dialect):
        localpart, domain_name = value.split('@')
        return '{0}@{1}'.format(
            localpart,
            idna.decode(domain_name),
        )

    python_type = str
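
    # Illustrative round-trip (addresses are examples only): 'user@münchen.example'
    # is bound to the database as 'user@xn--mnchen-3ya.example' and decoded back to
    # the Unicode form on read; IdnaDomain above does the same for bare domain names.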


class CommaSeparatedList(db.TypeDecorator):
    """ Stores a list as a comma-separated string, compatible with Postfix.
    """

    impl = db.String

    def process_bind_param(self, value, dialect):
        if not isinstance(value, (list, set)):
            raise TypeError('Must be a list')
        for item in value:
            if ',' in item:
                raise ValueError('Item must not contain a comma')
        return ','.join(sorted(value))

    def process_result_value(self, value, dialect):
        return list(filter(bool, value.split(','))) if value else []

    python_type = list
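
    # Example (values are illustrative): ['postmaster@example.com', 'admin@example.com']
    # is stored as the sorted string 'admin@example.com,postmaster@example.com' and
    # split back into a list when read; empty values are read back as an empty list.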


class JSONEncoded(db.TypeDecorator):
    """ Represents an immutable structure as a json-encoded string.
    """

    impl = db.String

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value else None

    python_type = str
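
    # Example: a value of {'option': 1} is stored as the string '{"option": 1}';
    # falsy values are stored as NULL and read back as None.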


class Base(db.Model):
    """ Base class for all models
    """

    __abstract__ = True

    metadata = sqlalchemy.schema.MetaData(
        naming_convention={
            'fk': '%(table_name)s_%(column_0_name)s_fkey',
            'pk': '%(table_name)s_pkey'
        }
    )

    created_at = db.Column(db.Date, nullable=False, default=date.today)
    updated_at = db.Column(db.Date, nullable=True, onupdate=date.today)
    comment = db.Column(db.String(255), nullable=True)

    @classmethod
    def _dict_pkey(cls):
        return cls.__mapper__.primary_key[0].name

    def _dict_pval(self):
        return getattr(self, self._dict_pkey())
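
    # Subclasses tune the dict export/import below via optional class attributes:
    # _dict_hide / _dict_secret (keys to omit or mask), _dict_show / _dict_extra
    # (additional keys to export), _dict_output (per-key export converters),
    # _dict_input (hook normalizing incoming data), _dict_types (accepted input
    # types for non-column keys), _dict_mandatory (keys required on creation) and
    # _dict_recurse (allow nested export/import of related models).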

    def to_dict(self, full=False, include_secrets=False, include_extra=None, recursed=False, hide=None):
        """ Return a dictionary representation of this model.
        """

        if recursed and not getattr(self, '_dict_recurse', False):
            return str(self)

        hide = set(hide or []) | {'created_at', 'updated_at'}
        if hasattr(self, '_dict_hide'):
            hide |= self._dict_hide

        secret = set()
        if not include_secrets and hasattr(self, '_dict_secret'):
            secret |= self._dict_secret

        convert = getattr(self, '_dict_output', {})

        extra_keys = getattr(self, '_dict_extra', {})
        if include_extra is None:
            include_extra = []

        res = {}

        for key in itertools.chain(
            self.__table__.columns.keys(),
            getattr(self, '_dict_show', []),
            *[extra_keys.get(extra, []) for extra in include_extra]
        ):
            if key in hide:
                continue
            if key in self.__table__.columns:
                default = self.__table__.columns[key].default
                if isinstance(default, sqlalchemy.sql.schema.ColumnDefault):
                    default = default.arg
            else:
                default = None
            value = getattr(self, key)
            if full or ((default or value) and value != default):
                if key in secret:
                    value = '<hidden>'
                elif value is not None and key in convert:
                    value = convert[key](value)
                res[key] = value

        for key in self.__mapper__.relationships.keys():
            if key in hide:
                continue
            if self.__mapper__.relationships[key].uselist:
                items = getattr(self, key)
                if self.__mapper__.relationships[key].query_class is not None:
                    if hasattr(items, 'all'):
                        items = items.all()
                if full or len(items):
                    if key in secret:
                        res[key] = '<hidden>'
                    else:
                        res[key] = [item.to_dict(full, include_secrets, include_extra, True) for item in items]
            else:
                value = getattr(self, key)
                if full or value is not None:
                    if key in secret:
                        res[key] = '<hidden>'
                    else:
                        res[key] = value.to_dict(full, include_secrets, include_extra, True)

        return res
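
    # from_dict() below accepts either a dict of fields or a bare primary-key value,
    # applies it to an existing row or creates a new one, and returns a list of
    # (item, changes) tuples, where changes is True for newly created items and a
    # list of (key, old, new) entries for updated ones.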

    @classmethod
    def from_dict(cls, data, delete=False):

        changed = []

        pkey = cls._dict_pkey()

        # handle "primary key" only
        if not isinstance(data, dict):
            data = {pkey: data}

        # modify input data
        if hasattr(cls, '_dict_input'):
            try:
                cls._dict_input(data)
            except Exception as reason:
                raise ValueError(f'{reason}', cls, None, data)

        # check for primary key (if not recursed)
        if not getattr(cls, '_dict_recurse', False):
            if not pkey in data:
                raise KeyError(f'primary key {cls.__table__}.{pkey} is missing', cls, pkey, data)

        # check data keys and values
        for key in list(data.keys()):

            # check key
            if not hasattr(cls, key) and not key in cls.__mapper__.relationships:
                raise KeyError(f'unknown key {cls.__table__}.{key}', cls, key, data)

            # check value type
            value = data[key]
            col = cls.__mapper__.columns.get(key)
            if col is not None:
                if not ((value is None and col.nullable) or (isinstance(value, col.type.python_type))):
                    raise TypeError(f'{cls.__table__}.{key} {value!r} has invalid type {type(value).__name__!r}', cls, key, data)
            else:
                rel = cls.__mapper__.relationships.get(key)
                if rel is None:
                    itype = getattr(cls, '_dict_types', {}).get(key)
                    if itype is not None:
                        if itype is False: # ignore value. TODO: emit warning?
                            del data[key]
                            continue
                        elif not isinstance(value, itype):
                            raise TypeError(f'{cls.__table__}.{key} {value!r} has invalid type {type(value).__name__!r}', cls, key, data)
                    else:
                        raise NotImplementedError(f'type not defined for {cls.__table__}.{key}')

            # handle relationships
            if key in cls.__mapper__.relationships:
                rel_model = cls.__mapper__.relationships[key].argument
                if not isinstance(rel_model, sqlalchemy.orm.Mapper):
                    add = rel_model.from_dict(value, delete)
                    assert len(add) == 1
                    rel_item, updated = add[0]
                    changed.append((rel_item, updated))
                    data[key] = rel_item

        # create item if necessary
        created = False
        item = cls.query.get(data[pkey]) if pkey in data else None
        if item is None:

            # check for mandatory keys
            missing = getattr(cls, '_dict_mandatory', set()) - set(data.keys())
            if missing:
                raise ValueError(f'mandatory key(s) {", ".join(sorted(missing))} for {cls.__table__} missing', cls, missing, data)

            # remove mapped relationships from data
            mapped = {}
            for key in list(data.keys()):
                if key in cls.__mapper__.relationships:
                    if isinstance(cls.__mapper__.relationships[key].argument, sqlalchemy.orm.Mapper):
                        mapped[key] = data[key]
                        del data[key]

            # create new item
            item = cls(**data)
            created = True

            # and update mapped relationships (below)
            data = mapped

        # update item
        updated = []
        for key, value in data.items():

            # skip primary key
            if key == pkey:
                continue

            if key in cls.__mapper__.relationships:
                # update relationship
                rel_model = cls.__mapper__.relationships[key].argument
                if isinstance(rel_model, sqlalchemy.orm.Mapper):
                    rel_model = rel_model.class_
                    # add (and create) referenced items
                    cur = getattr(item, key)
                    old = sorted(cur, key=id)
                    new = []
                    for rel_data in value:
                        # get or create related item
                        add = rel_model.from_dict(rel_data, delete)
                        assert len(add) == 1
                        rel_item, rel_updated = add[0]
                        changed.append((rel_item, rel_updated))
                        if rel_item not in cur:
                            cur.append(rel_item)
                        new.append(rel_item)

                    # delete referenced items missing in yaml
                    rel_pkey = rel_model._dict_pkey()
                    new_data = list([i.to_dict(True, True, None, True, [rel_pkey]) for i in new])
                    for rel_item in old:
                        if rel_item not in new:
                            # check if item with same data exists to stabilize import without primary key
                            rel_data = rel_item.to_dict(True, True, None, True, [rel_pkey])
                            try:
                                same_idx = new_data.index(rel_data)
                            except ValueError:
                                same = None
                            else:
                                same = new[same_idx]

                            if same is None:
                                # delete items missing in new
                                if delete:
                                    cur.remove(rel_item)
                                else:
                                    new.append(rel_item)
                            else:
                                # swap found item with same data with newly created item
                                new.append(rel_item)
                                new_data.append(rel_data)
                                new.remove(same)
                                del new_data[same_idx]
                                for i, (ch_item, _) in enumerate(changed):
                                    if ch_item is same:
                                        changed[i] = (rel_item, [])
                                        db.session.flush()
                                        db.session.delete(ch_item)
                                        break

                    # remember changes
                    new = sorted(new, key=id)
                    if new != old:
                        updated.append((key, old, new))

            else:
                # update key
                old = getattr(item, key)
                if isinstance(old, list):
                    # deduplicate list value
                    assert isinstance(value, list)
                    value = set(value)
                    old = set(old)
                    if not delete:
                        value = old | value
                if value != old:
                    updated.append((key, old, value))
                    setattr(item, key, value)

        changed.append((item, created if created else updated))

        return changed
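
# Minimal sketch of how the import machinery above is meant to be driven (the domain
# name is only an example, and an application context is assumed):
#
#     Domain.from_dict({'name': 'example.com', 'max_users': 10})
#     db.session.commit()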


# Many-to-many association table for domain managers
managers = db.Table('manager', Base.metadata,
    db.Column('domain_name', IdnaDomain, db.ForeignKey('domain.name')),
    db.Column('user_email', IdnaEmail, db.ForeignKey('user.email'))
)


class Config(Base):
    """ In-database configuration values
    """

    name = db.Column(db.String(255), primary_key=True, nullable=False)
    value = db.Column(JSONEncoded)


@sqlalchemy.event.listens_for(db.session, 'after_commit')
def store_dkim_key(session):
    """ Store DKIM key on commit
    """

    for obj in session.identity_map.values():
        if isinstance(obj, Domain):
            if obj._dkim_key_changed:
                file_path = obj._dkim_file()
                if obj._dkim_key:
                    with open(file_path, 'wb') as handle:
                        handle.write(obj._dkim_key)
                elif os.path.exists(file_path):
                    os.unlink(file_path)


class Domain(Base):
    """ A DNS domain that has mail addresses associated to it.
    """

    __tablename__ = 'domain'

    _dict_hide = {'users', 'managers', 'aliases'}
    _dict_show = {'dkim_key'}
    _dict_extra = {'dns':{'dkim_publickey', 'dns_mx', 'dns_spf', 'dns_dkim', 'dns_dmarc'}}
    _dict_secret = {'dkim_key'}
    _dict_types = {
        'dkim_key': (bytes, type(None)),
        'dkim_publickey': False,
        'dns_mx': False,
        'dns_spf': False,
        'dns_dkim': False,
        'dns_dmarc': False,
    }
    _dict_output = {'dkim_key': lambda key: key.decode('utf-8').strip().split('\n')[1:-1]}
    @staticmethod
    def _dict_input(data):
        if 'dkim_key' in data:
            key = data['dkim_key']
            if key is not None:
                if isinstance(key, list):
                    key = ''.join(key)
                if isinstance(key, str):
                    key = ''.join(key.strip().split()) # removes all whitespace
                    if key == 'generate':
                        data['dkim_key'] = dkim.gen_key()
                    elif key:
                        match = re.match('^-----BEGIN (RSA )?PRIVATE KEY-----', key)
                        if match is not None:
                            key = key[match.end():]
                        match = re.search('-----END (RSA )?PRIVATE KEY-----$', key)
                        if match is not None:
                            key = key[:match.start()]
                        key = '\n'.join(wrap(key, 64))
                        key = f'-----BEGIN PRIVATE KEY-----\n{key}\n-----END PRIVATE KEY-----\n'.encode('ascii')
                        try:
                            dkim.strip_key(key)
                        except:
                            raise ValueError('invalid dkim key')
                        else:
                            data['dkim_key'] = key
                    else:
                        data['dkim_key'] = None
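
    # dkim_key input accepted by _dict_input above: None (remove the key), the literal
    # string 'generate' (create a fresh key), or PEM key material given either as a
    # single string or as a list of lines.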

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    managers = db.relationship('User', secondary=managers,
        backref=db.backref('manager_of'), lazy='dynamic')
    max_users = db.Column(db.Integer, nullable=False, default=-1)
    max_aliases = db.Column(db.Integer, nullable=False, default=-1)
    max_quota_bytes = db.Column(db.BigInteger(), nullable=False, default=0)
    signup_enabled = db.Column(db.Boolean(), nullable=False, default=False)

    _dkim_key = None
    _dkim_key_changed = False

    def _dkim_file(self):
        return app.config['DKIM_PATH'].format(
            domain=self.name, selector=app.config['DKIM_SELECTOR'])

    @property
    def dns_mx(self):
        hostname = app.config['HOSTNAMES'].split(',')[0]
        return f'{self.name}. 600 IN MX 10 {hostname}.'

    @property
    def dns_spf(self):
        hostname = app.config['HOSTNAMES'].split(',')[0]
        return f'{self.name}. 600 IN TXT "v=spf1 mx a:{hostname} ~all"'

    @property
    def dns_dkim(self):
        if os.path.exists(self._dkim_file()):
            selector = app.config['DKIM_SELECTOR']
            return f'{selector}._domainkey.{self.name}. 600 IN TXT "v=DKIM1; k=rsa; p={self.dkim_publickey}"'

    @property
    def dns_dmarc(self):
        if os.path.exists(self._dkim_file()):
            domain = app.config['DOMAIN']
            rua = app.config['DMARC_RUA']
            rua = f' rua=mailto:{rua}@{domain};' if rua else ''
            ruf = app.config['DMARC_RUF']
            ruf = f' ruf=mailto:{ruf}@{domain};' if ruf else ''
            return f'_dmarc.{self.name}. 600 IN TXT "v=DMARC1; p=reject;{rua}{ruf} adkim=s; aspf=s"'
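
    # Illustrative output of the dns_* properties for a hypothetical 'example.com'
    # served by 'mail.example.com' with a 'dkim' selector:
    #   example.com. 600 IN MX 10 mail.example.com.
    #   example.com. 600 IN TXT "v=spf1 mx a:mail.example.com ~all"
    #   dkim._domainkey.example.com. 600 IN TXT "v=DKIM1; k=rsa; p=..."
    #   _dmarc.example.com. 600 IN TXT "v=DMARC1; p=reject; adkim=s; aspf=s"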

    @property
    def dkim_key(self):
        if self._dkim_key is None:
            file_path = self._dkim_file()
            if os.path.exists(file_path):
                with open(file_path, 'rb') as handle:
                    self._dkim_key = handle.read()
            else:
                self._dkim_key = b''
        return self._dkim_key if self._dkim_key else None

    @dkim_key.setter
    def dkim_key(self, value):
        old_key = self.dkim_key
        if value is None:
            value = b''
        self._dkim_key_changed = value != old_key
        self._dkim_key = value

    @property
    def dkim_publickey(self):
        dkim_key = self.dkim_key
        if dkim_key:
            return dkim.strip_key(dkim_key).decode('utf8')

    def generate_dkim_key(self):
        self.dkim_key = dkim.gen_key()

    def has_email(self, localpart):
        for email in self.users + self.aliases:
            if email.localpart == localpart:
                return True
        return False

    def check_mx(self):
        try:
            hostnames = app.config['HOSTNAMES'].split(',')
            return any(
                str(rset).split()[-1][:-1] in hostnames
                for rset in dns.resolver.query(self.name, 'MX')
            )
        except Exception:
            return False

    def __str__(self):
        return str(self.name)

    def __eq__(self, other):
        try:
            return self.name == other.name
        except AttributeError:
            return False


class Alternative(Base):
    """ Alternative name for a served domain.
    The name "domain alias" was avoided to prevent some confusion.
    """

    __tablename__ = 'alternative'

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    domain_name = db.Column(IdnaDomain, db.ForeignKey(Domain.name))
    domain = db.relationship(Domain,
        backref=db.backref('alternatives', cascade='all, delete-orphan'))

    def __str__(self):
        return str(self.name)


class Relay(Base):
    """ Relayed mail domain.
    The domain is either relayed publicly or through a specified SMTP host.
    """

    __tablename__ = 'relay'

    _dict_mandatory = {'smtp'}

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    smtp = db.Column(db.String(80), nullable=True)

    def __str__(self):
        return str(self.name)


class Email(object):
    """ Abstraction for an email address (localpart and domain).
    """

    localpart = db.Column(db.String(80), nullable=False)

    @staticmethod
    def _dict_input(data):
        if 'email' in data:
            if 'localpart' in data or 'domain' in data:
                raise ValueError('ambiguous key email and localpart/domain')
            elif isinstance(data['email'], str):
                data['localpart'], data['domain'] = data['email'].rsplit('@', 1)
        else:
            data['email'] = f'{data["localpart"]}@{data["domain"]}'

    @declarative.declared_attr
    def domain_name(cls):
        return db.Column(IdnaDomain, db.ForeignKey(Domain.name),
            nullable=False, default=IdnaDomain)

    # This field is redundant with both localpart and domain name.
    # It is however very useful for quick lookups without joining tables,
    # especially when the mail server is reading the database.
    @declarative.declared_attr
    def email(cls):
        updater = lambda context: '{0}@{1}'.format(
            context.current_parameters['localpart'],
            context.current_parameters['domain_name'],
        )
        return db.Column(IdnaEmail,
            primary_key=True, nullable=False,
            default=updater)

    def sendmail(self, subject, body):
        """ Send an email to the address.
        """
        from_address = '{0}@{1}'.format(
            app.config['POSTMASTER'],
            idna.encode(app.config['DOMAIN']).decode('ascii'),
        )
        with smtplib.SMTP(app.config['HOST_AUTHSMTP'], port=10025) as smtp:
            to_address = '{0}@{1}'.format(
                self.localpart,
                idna.encode(self.domain_name).decode('ascii'),
            )
            msg = text.MIMEText(body)
            msg['Subject'] = subject
            msg['From'] = from_address
            msg['To'] = to_address
            smtp.sendmail(from_address, [to_address], msg.as_string())

    @classmethod
    def resolve_domain(cls, email):
        localpart, domain_name = email.split('@', 1) if '@' in email else (None, email)
        alternative = Alternative.query.get(domain_name)
        if alternative:
            domain_name = alternative.domain_name
        return (localpart, domain_name)

    @classmethod
    def resolve_destination(cls, localpart, domain_name, ignore_forward_keep=False):
        localpart_stripped = None
        stripped_alias = None

        delimiter = os.environ.get('RECIPIENT_DELIMITER')
        if delimiter and localpart and delimiter in localpart:
            localpart_stripped = localpart.rsplit(delimiter, 1)[0]

        user = User.query.get('{}@{}'.format(localpart, domain_name))
        if not user and localpart_stripped:
            user = User.query.get('{}@{}'.format(localpart_stripped, domain_name))
        if user:
            email = '{}@{}'.format(localpart, domain_name)

            if user.forward_enabled:
                destination = user.forward_destination
                if user.forward_keep or ignore_forward_keep:
                    destination.append(email)
            else:
                destination = [email]
            return destination

        pure_alias = Alias.resolve(localpart, domain_name)
        stripped_alias = Alias.resolve(localpart_stripped, domain_name)

        if pure_alias and not pure_alias.wildcard:
            return pure_alias.destination
        elif stripped_alias:
            return stripped_alias.destination
        elif pure_alias:
            return pure_alias.destination
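
    # Resolution order implemented above: a matching user (with or without the
    # recipient-delimiter suffix) wins, then a non-wildcard alias for the full
    # localpart, then an alias for the stripped localpart, then a wildcard alias;
    # None is returned when nothing matches.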

    def __str__(self):
        return str(self.email)


class User(Base, Email):
    """ A user is an email address that has a password to access a mailbox.
    """

    __tablename__ = 'user'

    _dict_hide = {'domain_name', 'domain', 'localpart', 'quota_bytes_used'}
    _dict_mandatory = {'localpart', 'domain', 'password'}
    @classmethod
    def _dict_input(cls, data):
        Email._dict_input(data)
        # handle password
        if 'password' in data:
            if 'password_hash' in data or 'hash_scheme' in data:
                raise ValueError('ambiguous key password and password_hash/hash_scheme')
            # check (hashed) password
            password = data['password']
            if password.startswith('{') and '}' in password:
                scheme = password[1:password.index('}')]
                if scheme not in cls.scheme_dict:
                    raise ValueError(f'invalid password scheme {scheme!r}')
            else:
                raise ValueError(f'invalid hashed password {password!r}')
        elif 'password_hash' in data and 'hash_scheme' in data:
            if data['hash_scheme'] not in cls.scheme_dict:
                raise ValueError(f'invalid password scheme {data["hash_scheme"]!r}')
            data['password'] = '{' + data['hash_scheme'] + '}' + data['password_hash']
            del data['hash_scheme']
            del data['password_hash']
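
    # Password input accepted on import (values are illustrative): either a
    # pre-hashed 'password' such as '{SHA512-CRYPT}$6$...', or the pair
    # 'password_hash' plus 'hash_scheme' (one of the keys of scheme_dict below),
    # which is folded into the same '{SCHEME}hash' form.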

    domain = db.relationship(Domain,
        backref=db.backref('users', cascade='all, delete-orphan'))
    password = db.Column(db.String(255), nullable=False)
    quota_bytes = db.Column(db.BigInteger(), nullable=False, default=10**9)
    quota_bytes_used = db.Column(db.BigInteger(), nullable=False, default=0)
    global_admin = db.Column(db.Boolean(), nullable=False, default=False)
    enabled = db.Column(db.Boolean(), nullable=False, default=True)

    # Features
    enable_imap = db.Column(db.Boolean(), nullable=False, default=True)
    enable_pop = db.Column(db.Boolean(), nullable=False, default=True)

    # Filters
    forward_enabled = db.Column(db.Boolean(), nullable=False, default=False)
    forward_destination = db.Column(CommaSeparatedList(), nullable=True, default=[])
    forward_keep = db.Column(db.Boolean(), nullable=False, default=True)
    reply_enabled = db.Column(db.Boolean(), nullable=False, default=False)
    reply_subject = db.Column(db.String(255), nullable=True, default=None)
    reply_body = db.Column(db.Text(), nullable=True, default=None)
    reply_startdate = db.Column(db.Date, nullable=False,
        default=date(1900, 1, 1))
    reply_enddate = db.Column(db.Date, nullable=False,
        default=date(2999, 12, 31))

    # Settings
    displayed_name = db.Column(db.String(160), nullable=False, default='')
    spam_enabled = db.Column(db.Boolean(), nullable=False, default=True)
    spam_threshold = db.Column(db.Integer(), nullable=False, default=80)

    # Flask-login attributes
    is_authenticated = True
    is_active = True
    is_anonymous = False

    def get_id(self):
        return self.email

    @property
    def destination(self):
        if self.forward_enabled:
            result = list(self.forward_destination)
            if self.forward_keep:
                result.append(self.email)
            return ','.join(result)
        else:
            return self.email

    @property
    def reply_active(self):
        now = date.today()
        return (
            self.reply_enabled and
            self.reply_startdate < now and
            self.reply_enddate > now
        )

    scheme_dict = {'PBKDF2': 'pbkdf2_sha512',
                   'BLF-CRYPT': 'bcrypt',
                   'SHA512-CRYPT': 'sha512_crypt',
                   'SHA256-CRYPT': 'sha256_crypt',
                   'MD5-CRYPT': 'md5_crypt',
                   'CRYPT': 'des_crypt'}

    def get_password_context(self):
        return passlib.context.CryptContext(
            schemes=self.scheme_dict.values(),
            default=self.scheme_dict[app.config['PASSWORD_SCHEME']],
        )

    def check_password(self, password):
        context = self.get_password_context()
        reference = re.match('({[^}]+})?(.*)', self.password).group(2)
        result = context.verify(password, reference)
        if result and context.identify(reference) != context.default_scheme():
            self.set_password(password)
            db.session.add(self)
            db.session.commit()
        return result

    def set_password(self, password, hash_scheme=None, raw=False):
        """Set password for user with specified encryption scheme
           @password: plain text password to encrypt (if raw == True the hash itself)
        """
        if hash_scheme is None:
            hash_scheme = app.config['PASSWORD_SCHEME']
        # for the list of hash schemes see https://wiki2.dovecot.org/Authentication/PasswordSchemes
        if raw:
            self.password = '{'+hash_scheme+'}' + password
        else:
            self.password = '{'+hash_scheme+'}' + self.get_password_context().encrypt(password, self.scheme_dict[hash_scheme])

    def get_managed_domains(self):
        if self.global_admin:
            return Domain.query.all()
        else:
            return self.manager_of

    def get_managed_emails(self, include_aliases=True):
        emails = []
        for domain in self.get_managed_domains():
            emails.extend(domain.users)
            if include_aliases:
                emails.extend(domain.aliases)
        return emails

    def send_welcome(self):
        if app.config['WELCOME']:
            self.sendmail(app.config['WELCOME_SUBJECT'],
                app.config['WELCOME_BODY'])

    @classmethod
    def get(cls, email):
        return cls.query.get(email)

    @classmethod
    def login(cls, email, password):
        user = cls.query.get(email)
        return user if (user and user.enabled and user.check_password(password)) else None


class Alias(Base, Email):
    """ An alias is an email address that redirects to some destination.
    """

    __tablename__ = 'alias'

    _dict_hide = {'domain_name', 'domain', 'localpart'}
    @staticmethod
    def _dict_input(data):
        Email._dict_input(data)
        # handle comma delimited string for backwards compatibility
        dst = data.get('destination')
        if isinstance(dst, str):
            data['destination'] = list([adr.strip() for adr in dst.split(',')])

    domain = db.relationship(Domain,
        backref=db.backref('aliases', cascade='all, delete-orphan'))
    wildcard = db.Column(db.Boolean(), nullable=False, default=False)
    destination = db.Column(CommaSeparatedList, nullable=False, default=[])
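
    # Wildcard aliases store an SQL LIKE pattern in localpart (e.g. '%' or 'sales-%').
    # resolve() below prefers a case-preserving non-wildcard match, then falls back to
    # a case-insensitive lookup; within each lookup non-wildcard matches beat wildcard
    # ones and longer localparts beat shorter ones.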

    @classmethod
    def resolve(cls, localpart, domain_name):
        alias_preserve_case = cls.query.filter(
            sqlalchemy.and_(cls.domain_name == domain_name,
                sqlalchemy.or_(
                    sqlalchemy.and_(
                        cls.wildcard == False,
                        cls.localpart == localpart
                    ), sqlalchemy.and_(
                        cls.wildcard == True,
                        sqlalchemy.bindparam('l', localpart).like(cls.localpart)
                    )
                )
            )
        ).order_by(cls.wildcard, sqlalchemy.func.char_length(cls.localpart).desc()).first()

        localpart_lower = localpart.lower() if localpart else None
        alias_lower_case = cls.query.filter(
            sqlalchemy.and_(cls.domain_name == domain_name,
                sqlalchemy.or_(
                    sqlalchemy.and_(
                        cls.wildcard == False,
                        sqlalchemy.func.lower(cls.localpart) == localpart_lower
                    ), sqlalchemy.and_(
                        cls.wildcard == True,
                        sqlalchemy.bindparam('l', localpart_lower).like(sqlalchemy.func.lower(cls.localpart))
                    )
                )
            )
        ).order_by(cls.wildcard, sqlalchemy.func.char_length(sqlalchemy.func.lower(cls.localpart)).desc()).first()

        if alias_preserve_case and alias_lower_case:
            if alias_preserve_case.wildcard:
                return alias_lower_case
            else:
                return alias_preserve_case
        elif alias_preserve_case and not alias_lower_case:
            return alias_preserve_case
        elif alias_lower_case and not alias_preserve_case:
            return alias_lower_case
        else:
            return None


class Token(Base):
    """ A token is an application password for a given user.
    """

    __tablename__ = 'token'

    _dict_recurse = True
    _dict_hide = {'user', 'user_email'}
    _dict_mandatory = {'password'}

    id = db.Column(db.Integer(), primary_key=True)
    user_email = db.Column(db.String(255), db.ForeignKey(User.email),
        nullable=False)
    user = db.relationship(User,
        backref=db.backref('tokens', cascade='all, delete-orphan'))
    password = db.Column(db.String(255), nullable=False)
    ip = db.Column(db.String(255))
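
    # Token passwords are stored as sha256_crypt hashes; the low rounds value used in
    # set_password below keeps verification cheap, which appears intended for long
    # random token strings rather than user-chosen passwords.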

    def check_password(self, password):
        return passlib.hash.sha256_crypt.verify(password, self.password)

    def set_password(self, password):
        self.password = passlib.hash.sha256_crypt.using(rounds=1000).hash(password)

    def __str__(self):
        return self.comment or self.ip


class Fetch(Base):
    """ A fetched account is a remote POP/IMAP account fetched into a local
    account.
    """

    __tablename__ = 'fetch'

    _dict_recurse = True
    _dict_hide = {'user_email', 'user', 'last_check', 'error'}
    _dict_mandatory = {'protocol', 'host', 'port', 'username', 'password'}
    _dict_secret = {'password'}

    id = db.Column(db.Integer(), primary_key=True)
    user_email = db.Column(db.String(255), db.ForeignKey(User.email),
        nullable=False)
    user = db.relationship(User,
        backref=db.backref('fetches', cascade='all, delete-orphan'))
    protocol = db.Column(db.Enum('imap', 'pop3'), nullable=False)
    host = db.Column(db.String(255), nullable=False)
    port = db.Column(db.Integer(), nullable=False)
    tls = db.Column(db.Boolean(), nullable=False, default=False)
    username = db.Column(db.String(255), nullable=False)
    password = db.Column(db.String(255), nullable=False)
    keep = db.Column(db.Boolean(), nullable=False, default=False)
    last_check = db.Column(db.DateTime, nullable=True)
    error = db.Column(db.String(1023), nullable=True)
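
    # Example of the string form produced below (values are illustrative):
    # an IMAP fetch with TLS renders as 'imaps://user@mail.example.com:993'.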

    def __str__(self):
        return f'{self.protocol}{"s" if self.tls else ""}://{self.username}@{self.host}:{self.port}'