from mailu import dkim

from sqlalchemy.ext import declarative
from passlib import context, hash
from datetime import datetime, date
from email.mime import text
from flask import current_app as app
from textwrap import wrap

import flask_sqlalchemy
import sqlalchemy
import re
import time
import os
import glob
import smtplib
import idna
import dns.resolver
import json
import itertools


db = flask_sqlalchemy.SQLAlchemy()


class IdnaDomain(db.TypeDecorator):
    """ Stores a Unicode string in its IDNA representation (ASCII only)
    """

    impl = db.String(80)

    def process_bind_param(self, value, dialect):
        return idna.encode(value).decode("ascii").lower()

    def process_result_value(self, value, dialect):
        return idna.decode(value)

    python_type = str


class IdnaEmail(db.TypeDecorator):
    """ Stores a Unicode string in its IDNA representation (ASCII only)
    """

    impl = db.String(255)

    def process_bind_param(self, value, dialect):
        try:
            localpart, domain_name = value.split('@')
            return "{0}@{1}".format(
                localpart,
                idna.encode(domain_name).decode('ascii'),
            ).lower()
        except ValueError:
            pass

    def process_result_value(self, value, dialect):
        localpart, domain_name = value.split('@')
        return "{0}@{1}".format(
            localpart,
            idna.decode(domain_name),
        )

    python_type = str
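
# Illustrative sketch (comment only, not executed): columns declared with these
# decorators convert transparently between Unicode and punycode. Assuming a
# hypothetical model with `name = db.Column(IdnaDomain)`, assigning the value
# 'münchen.example' writes 'xn--mnchen-3ya.example' to the database and reads
# back 'münchen.example'; IdnaEmail does the same for the domain part of
# 'user@münchen.example' while leaving the localpart untouched.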


class CommaSeparatedList(db.TypeDecorator):
    """ Stores a list as a comma-separated string, compatible with Postfix.
    """

    impl = db.String

    def process_bind_param(self, value, dialect):
        if not isinstance(value, (list, set)):
            raise TypeError("Should be a list")
        for item in value:
            if "," in item:
                raise ValueError("Item must not contain a comma")
        return ",".join(sorted(value))

    def process_result_value(self, value, dialect):
        return list(filter(bool, value.split(","))) if value else []

    python_type = list
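
# Sketch of the round-trip (comment only, not executed): a value such as
# ['postmaster@example.com', 'admin@example.com'] is stored as the sorted string
# 'admin@example.com,postmaster@example.com', and an empty or NULL column is
# read back as [], so Postfix map lookups always see a flat comma-separated list.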


class JSONEncoded(db.TypeDecorator):
    """ Represents an immutable structure as a json-encoded string.
    """

    impl = db.String

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value else None

    python_type = str
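
# For example (comment only, not executed): a hypothetical Config row holding
# the value {'migration': 1} is persisted as the string '{"migration": 1}' and
# decoded back to a dict on load; empty values are stored as NULL.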


class Base(db.Model):
    """ Base class for all models
    """

    __abstract__ = True

    metadata = sqlalchemy.schema.MetaData(
        naming_convention={
            "fk": "%(table_name)s_%(column_0_name)s_fkey",
            "pk": "%(table_name)s_pkey"
        }
    )

    created_at = db.Column(db.Date, nullable=False, default=date.today)
    updated_at = db.Column(db.Date, nullable=True, onupdate=date.today)
    comment = db.Column(db.String(255), nullable=True)

    @classmethod
    def _dict_pkey(model):
        return model.__mapper__.primary_key[0].name

    def _dict_pval(self):
        return getattr(self, self._dict_pkey())

    def to_dict(self, full=False, include_secrets=False, include_extra=None, recursed=False, hide=None):
        """ Return a dictionary representation of this model.
        """

        if recursed and not getattr(self, '_dict_recurse', False):
            return str(self)

        hide = set(hide or []) | {'created_at', 'updated_at'}
        if hasattr(self, '_dict_hide'):
            hide |= self._dict_hide

        secret = set()
        if not include_secrets and hasattr(self, '_dict_secret'):
            secret |= self._dict_secret

        convert = getattr(self, '_dict_output', {})

        extra_keys = getattr(self, '_dict_extra', {})
        if include_extra is None:
            include_extra = []

        res = {}

        for key in itertools.chain(
            self.__table__.columns.keys(),
            getattr(self, '_dict_show', []),
            *[extra_keys.get(extra, []) for extra in include_extra]
        ):
            if key in hide:
                continue
            if key in self.__table__.columns:
                default = self.__table__.columns[key].default
                if isinstance(default, sqlalchemy.sql.schema.ColumnDefault):
                    default = default.arg
            else:
                default = None
            value = getattr(self, key)
            if full or ((default or value) and value != default):
                if key in secret:
                    value = '<hidden>'
                elif value is not None and key in convert:
                    value = convert[key](value)
                res[key] = value

        for key in self.__mapper__.relationships.keys():
            if key in hide:
                continue
            if self.__mapper__.relationships[key].uselist:
                items = getattr(self, key)
                if self.__mapper__.relationships[key].query_class is not None:
                    if hasattr(items, 'all'):
                        items = items.all()
                if full or len(items):
                    if key in secret:
                        res[key] = '<hidden>'
                    else:
                        res[key] = [item.to_dict(full, include_secrets, include_extra, True) for item in items]
            else:
                value = getattr(self, key)
                if full or value is not None:
                    if key in secret:
                        res[key] = '<hidden>'
                    else:
                        res[key] = value.to_dict(full, include_secrets, include_extra, True)

        return res
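
    # Example shape of the output (comment only, not executed), assuming a
    # hypothetical alias row: Alias.query.get('postmaster@example.com').to_dict()
    # would yield something like
    #   {'email': 'postmaster@example.com', 'destination': ['admin@example.com']}
    # with hidden keys (_dict_hide), unchanged defaults and secrets filtered out
    # unless full=True / include_secrets=True are passed.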

    @classmethod
    def from_dict(model, data, delete=False):

        changed = []

        pkey = model._dict_pkey()

        # handle "primary key" only
        if type(data) is not dict:
            data = {pkey: data}

        # modify input data
        if hasattr(model, '_dict_input'):
            try:
                model._dict_input(data)
            except Exception as reason:
                raise ValueError(f'{reason}', model, None, data)

        # check for primary key (if not recursed)
        if not getattr(model, '_dict_recurse', False):
            if not pkey in data:
                raise KeyError(f'primary key {model.__table__}.{pkey} is missing', model, pkey, data)

        # check data keys and values
        for key in list(data.keys()):

            # check key
            if not hasattr(model, key) and not key in model.__mapper__.relationships:
                raise KeyError(f'unknown key {model.__table__}.{key}', model, key, data)

            # check value type
            value = data[key]
            col = model.__mapper__.columns.get(key)
            if col is not None:
                if not ((value is None and col.nullable) or (type(value) is col.type.python_type)):
                    raise TypeError(f'{model.__table__}.{key} {value!r} has invalid type {type(value).__name__!r}', model, key, data)
            else:
                rel = model.__mapper__.relationships.get(key)
                if rel is None:
                    itype = getattr(model, '_dict_types', {}).get(key)
                    if itype is not None:
                        if itype is False: # ignore value
                            del data[key]
                            continue
                        elif not isinstance(value, itype):
                            raise TypeError(f'{model.__table__}.{key} {value!r} has invalid type {type(value).__name__!r}', model, key, data)
                    else:
                        raise NotImplementedError(f'type not defined for {model.__table__}.{key}')

            # handle relationships
            if key in model.__mapper__.relationships:
                rel_model = model.__mapper__.relationships[key].argument
                if not isinstance(rel_model, sqlalchemy.orm.Mapper):
                    add = rel_model.from_dict(value, delete)
                    assert len(add) == 1
                    rel_item, updated = add[0]
                    changed.append((rel_item, updated))
                    data[key] = rel_item

        # create item if necessary
        created = False
        item = model.query.get(data[pkey]) if pkey in data else None
        if item is None:

            # check for mandatory keys
            missing = getattr(model, '_dict_mandatory', set()) - set(data.keys())
            if missing:
                raise ValueError(f'mandatory key(s) {", ".join(sorted(missing))} for {model.__table__} missing', model, missing, data)

            # remove mapped relationships from data
            mapped = {}
            for key in list(data.keys()):
                if key in model.__mapper__.relationships:
                    if isinstance(model.__mapper__.relationships[key].argument, sqlalchemy.orm.Mapper):
                        mapped[key] = data[key]
                        del data[key]

            # create new item
            item = model(**data)
            created = True

            # and update mapped relationships (below)
            data = mapped

        # update item
        updated = []
        for key, value in data.items():

            # skip primary key
            if key == pkey:
                continue

            if key in model.__mapper__.relationships:
                # update relationship
                rel_model = model.__mapper__.relationships[key].argument
                if isinstance(rel_model, sqlalchemy.orm.Mapper):
                    rel_model = rel_model.class_
                    # add (and create) referenced items
                    cur = getattr(item, key)
                    old = sorted(cur, key=lambda i: id(i))
                    new = []
                    for rel_data in value:
                        # get or create related item
                        add = rel_model.from_dict(rel_data, delete)
                        assert len(add) == 1
                        rel_item, rel_updated = add[0]
                        changed.append((rel_item, rel_updated))
                        if rel_item not in cur:
                            cur.append(rel_item)
                        new.append(rel_item)

                    # delete referenced items missing in yaml
                    rel_pkey = rel_model._dict_pkey()
                    new_data = list([i.to_dict(True, True, None, True, [rel_pkey]) for i in new])
                    for rel_item in old:
                        if rel_item not in new:
                            # check if item with same data exists to stabilize import without primary key
                            rel_data = rel_item.to_dict(True, True, None, True, [rel_pkey])
                            try:
                                same_idx = new_data.index(rel_data)
                            except ValueError:
                                same = None
                            else:
                                same = new[same_idx]

                            if same is None:
                                # delete items missing in new
                                if delete:
                                    cur.remove(rel_item)
                                else:
                                    new.append(rel_item)
                            else:
                                # swap found item with same data with newly created item
                                new.append(rel_item)
                                new_data.append(rel_data)
                                new.remove(same)
                                del new_data[same_idx]
                                for i, (ch_item, _) in enumerate(changed):
                                    if ch_item is same:
                                        changed[i] = (rel_item, [])
                                        db.session.flush()
                                        db.session.delete(ch_item)
                                        break

                    # remember changes
                    new = sorted(new, key=lambda i: id(i))
                    if new != old:
                        updated.append((key, old, new))

            else:
                # update key
                old = getattr(item, key)
                if type(old) is list:
                    # deduplicate list value
                    assert type(value) is list
                    value = set(value)
                    old = set(old)
                    if not delete:
                        value = old | value
                if value != old:
                    updated.append((key, old, value))
                    setattr(item, key, value)

        changed.append((item, created if created else updated))

        return changed
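
    # Usage sketch (comment only, not executed), e.g. from a YAML/JSON import,
    # using hypothetical input data:
    #   changed = Domain.from_dict({'name': 'example.com', 'max_users': 10})
    # looks the row up by its primary key, creates it if missing (checking
    # _dict_mandatory), applies the remaining keys, and returns a list of
    # (item, created_or_updates) tuples; delete=True also removes related rows
    # that are absent from the input.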


# Many-to-many association table for domain managers
managers = db.Table('manager', Base.metadata,
    db.Column('domain_name', IdnaDomain, db.ForeignKey('domain.name')),
    db.Column('user_email', IdnaEmail, db.ForeignKey('user.email'))
)


class Config(Base):
    """ In-database configuration values
    """

    name = db.Column(db.String(255), primary_key=True, nullable=False)
    value = db.Column(JSONEncoded)


class Domain(Base):
    """ A DNS domain that has mail addresses associated to it.
    """

    __tablename__ = "domain"

    _dict_hide = {'users', 'managers', 'aliases'}
    _dict_show = {'dkim_key'}
    _dict_extra = {'dns': {'dkim_publickey', 'dns_mx', 'dns_spf', 'dns_dkim', 'dns_dmarc'}}
    _dict_secret = {'dkim_key'}
    _dict_types = {
        'dkim_key': (bytes, type(None)),
        'dkim_publickey': False,
        'dns_mx': False,
        'dns_spf': False,
        'dns_dkim': False,
        'dns_dmarc': False,
    }
    _dict_output = {'dkim_key': lambda key: key.decode('utf-8').strip().split('\n')[1:-1]}

    @staticmethod
    def _dict_input(data):
        if 'dkim_key' in data:
            key = data['dkim_key']
            if key is not None:
                if type(key) is list:
                    key = ''.join(key)
                if type(key) is str:
                    key = ''.join(key.strip().split())  # removes all whitespace
                    if key == 'generate':
                        data['dkim_key'] = dkim.gen_key()
                    elif key:
                        m = re.match('^-----BEGIN (RSA )?PRIVATE KEY-----', key)
                        if m is not None:
                            key = key[m.end():]
                        m = re.search('-----END (RSA )?PRIVATE KEY-----$', key)
                        if m is not None:
                            key = key[:m.start()]
                        key = '\n'.join(wrap(key, 64))
                        key = f'-----BEGIN PRIVATE KEY-----\n{key}\n-----END PRIVATE KEY-----\n'.encode('ascii')
                        try:
                            dkim.strip_key(key)
                        except Exception:
                            raise ValueError('invalid dkim key')
                        else:
                            data['dkim_key'] = key
                    else:
                        data['dkim_key'] = None
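
    # Accepted import values for 'dkim_key' (comment only, not executed),
    # illustrating the branches above with hypothetical data:
    #   {'name': 'example.com', 'dkim_key': 'generate'}  -> a fresh key is generated
    #   {'name': 'example.com', 'dkim_key': ['-----BEGIN PRIVATE KEY-----', 'MIIE...', '-----END PRIVATE KEY-----']}
    #                                                    -> lines are joined, re-wrapped and validated
    #   {'name': 'example.com', 'dkim_key': ''} or None  -> the key is removed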

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    managers = db.relationship('User', secondary=managers,
        backref=db.backref('manager_of'), lazy='dynamic')
    max_users = db.Column(db.Integer, nullable=False, default=-1)
    max_aliases = db.Column(db.Integer, nullable=False, default=-1)
    max_quota_bytes = db.Column(db.BigInteger(), nullable=False, default=0)
    signup_enabled = db.Column(db.Boolean(), nullable=False, default=False)

    def _dkim_file(self):
        return app.config["DKIM_PATH"].format(
            domain=self.name, selector=app.config["DKIM_SELECTOR"])

    @property
    def dns_mx(self):
        hostname = app.config['HOSTNAMES'].split(',')[0]
        return f'{self.name}. 600 IN MX 10 {hostname}.'

    @property
    def dns_spf(self):
        hostname = app.config['HOSTNAMES'].split(',')[0]
        return f'{self.name}. 600 IN TXT "v=spf1 mx a:{hostname} ~all"'

    @property
    def dns_dkim(self):
        if os.path.exists(self._dkim_file()):
            selector = app.config['DKIM_SELECTOR']
            return f'{selector}._domainkey.{self.name}. 600 IN TXT "v=DKIM1; k=rsa; p={self.dkim_publickey}"'

    @property
    def dns_dmarc(self):
        if os.path.exists(self._dkim_file()):
            domain = app.config['DOMAIN']
            rua = app.config['DMARC_RUA']
            rua = f' rua=mailto:{rua}@{domain};' if rua else ''
            ruf = app.config['DMARC_RUF']
            ruf = f' ruf=mailto:{ruf}@{domain};' if ruf else ''
            return f'_dmarc.{self.name}. 600 IN TXT "v=DMARC1; p=reject;{rua}{ruf} adkim=s; aspf=s"'
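
    # Example of the generated records (comment only, not executed), assuming a
    # domain 'example.com' hosted on 'mail.example.com' with DKIM selector 'dkim':
    #   dns_mx    -> 'example.com. 600 IN MX 10 mail.example.com.'
    #   dns_spf   -> 'example.com. 600 IN TXT "v=spf1 mx a:mail.example.com ~all"'
    #   dns_dkim  -> 'dkim._domainkey.example.com. 600 IN TXT "v=DKIM1; k=rsa; p=..."'
    #   dns_dmarc -> '_dmarc.example.com. 600 IN TXT "v=DMARC1; p=reject; adkim=s; aspf=s"'
    # dns_dkim and dns_dmarc return None until a DKIM key file exists for the domain.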

    @property
    def dkim_key(self):
        file_path = self._dkim_file()
        if os.path.exists(file_path):
            with open(file_path, "rb") as handle:
                return handle.read()

    @dkim_key.setter
    def dkim_key(self, value):
        file_path = self._dkim_file()
        if value is None:
            if os.path.exists(file_path):
                os.unlink(file_path)
        else:
            with open(file_path, "wb") as handle:
                handle.write(value)

    @property
    def dkim_publickey(self):
        dkim_key = self.dkim_key
        if dkim_key:
            return dkim.strip_key(self.dkim_key).decode("utf8")

    def generate_dkim_key(self):
        self.dkim_key = dkim.gen_key()

    def has_email(self, localpart):
        for email in self.users + self.aliases:
            if email.localpart == localpart:
                return True
        else:
            return False

    def check_mx(self):
        try:
            hostnames = app.config['HOSTNAMES'].split(',')
            return any(
                str(rset).split()[-1][:-1] in hostnames
                for rset in dns.resolver.query(self.name, 'MX')
            )
        except Exception:
            return False

    def __str__(self):
        return self.name

    def __eq__(self, other):
        try:
            return self.name == other.name
        except AttributeError:
            return False


class Alternative(Base):
    """ Alternative name for a served domain.
    The name "domain alias" was avoided to prevent some confusion.
    """

    __tablename__ = "alternative"

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    domain_name = db.Column(IdnaDomain, db.ForeignKey(Domain.name))
    domain = db.relationship(Domain,
        backref=db.backref('alternatives', cascade='all, delete-orphan'))

    def __str__(self):
        return self.name


class Relay(Base):
    """ Relayed mail domain.
    The domain is either relayed publicly or through a specified SMTP host.
    """

    __tablename__ = "relay"

    _dict_mandatory = {'smtp'}

    name = db.Column(IdnaDomain, primary_key=True, nullable=False)
    smtp = db.Column(db.String(80), nullable=True)

    def __str__(self):
        return self.name


class Email(object):
    """ Abstraction for an email address (localpart and domain).
    """

    localpart = db.Column(db.String(80), nullable=False)

    @staticmethod
    def _dict_input(data):
        if 'email' in data:
            if 'localpart' in data or 'domain' in data:
                raise ValueError('ambiguous key email and localpart/domain')
            elif type(data['email']) is str:
                data['localpart'], data['domain'] = data['email'].rsplit('@', 1)
        else:
            data['email'] = f"{data['localpart']}@{data['domain']}"

    @declarative.declared_attr
    def domain_name(cls):
        return db.Column(IdnaDomain, db.ForeignKey(Domain.name),
            nullable=False, default=IdnaDomain)

    # This field is redundant with both localpart and domain name.
    # It is however very useful for quick lookups without joining tables,
    # especially when the mail server is reading the database.
    @declarative.declared_attr
    def email(cls):
        updater = lambda context: "{0}@{1}".format(
            context.current_parameters["localpart"],
            context.current_parameters["domain_name"],
        )
        return db.Column(IdnaEmail,
            primary_key=True, nullable=False,
            default=updater)

    def sendmail(self, subject, body):
        """ Send an email to the address.
        """
        from_address = "{0}@{1}".format(
            app.config['POSTMASTER'],
            idna.encode(app.config['DOMAIN']).decode('ascii'),
        )
        with smtplib.SMTP(app.config['HOST_AUTHSMTP'], port=10025) as smtp:
            to_address = "{0}@{1}".format(
                self.localpart,
                idna.encode(self.domain_name).decode('ascii'),
            )
            msg = text.MIMEText(body)
            msg['Subject'] = subject
            msg['From'] = from_address
            msg['To'] = to_address
            smtp.sendmail(from_address, [to_address], msg.as_string())

    @classmethod
    def resolve_domain(cls, email):
        localpart, domain_name = email.split('@', 1) if '@' in email else (None, email)
        alternative = Alternative.query.get(domain_name)
        if alternative:
            domain_name = alternative.domain_name
        return (localpart, domain_name)

    @classmethod
    def resolve_destination(cls, localpart, domain_name, ignore_forward_keep=False):
        localpart_stripped = None
        stripped_alias = None

        delimiter = os.environ.get('RECIPIENT_DELIMITER')
        if delimiter and delimiter in localpart:
            localpart_stripped = localpart.rsplit(delimiter, 1)[0]

        user = User.query.get('{}@{}'.format(localpart, domain_name))
        if not user and localpart_stripped:
            user = User.query.get('{}@{}'.format(localpart_stripped, domain_name))
        if user:
            email = '{}@{}'.format(localpart, domain_name)

            if user.forward_enabled:
                destination = user.forward_destination
                if user.forward_keep or ignore_forward_keep:
                    destination.append(email)
            else:
                destination = [email]
            return destination

        pure_alias = Alias.resolve(localpart, domain_name)
        stripped_alias = Alias.resolve(localpart_stripped, domain_name)

        if pure_alias and not pure_alias.wildcard:
            return pure_alias.destination
        elif stripped_alias:
            return stripped_alias.destination
        elif pure_alias:
            return pure_alias.destination
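
    # Resolution order (comment only, not executed), e.g. for the hypothetical
    # recipient 'sales+news@example.com' with RECIPIENT_DELIMITER='+':
    #   1. a User 'sales+news@example.com', then 'sales@example.com'
    #      (its own address, or its forward destinations when forwarding is enabled)
    #   2. an exact (non-wildcard) Alias for the full localpart
    #   3. an Alias for the stripped localpart 'sales'
    #   4. a wildcard Alias matching the full localpart
    # None is returned when nothing matches.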

    def __str__(self):
        return self.email


class User(Base, Email):
    """ A user is an email address that has a password to access a mailbox.
    """

    __tablename__ = "user"

    _dict_hide = {'domain_name', 'domain', 'localpart', 'quota_bytes_used'}
    _dict_mandatory = {'localpart', 'domain', 'password'}

    @classmethod
    def _dict_input(cls, data):
        Email._dict_input(data)
        # handle password
        if 'password' in data:
            if 'password_hash' in data or 'hash_scheme' in data:
                raise ValueError('ambiguous key password and password_hash/hash_scheme')
            # check (hashed) password
            password = data['password']
            if password.startswith('{') and '}' in password:
                scheme = password[1:password.index('}')]
                if scheme not in cls.scheme_dict:
                    raise ValueError(f'invalid password scheme {scheme!r}')
            else:
                raise ValueError(f'invalid hashed password {password!r}')
        elif 'password_hash' in data and 'hash_scheme' in data:
            if data['hash_scheme'] not in cls.scheme_dict:
                raise ValueError(f'invalid password scheme {data["hash_scheme"]!r}')
            data['password'] = '{' + data['hash_scheme'] + '}' + data['password_hash']
            del data['hash_scheme']
            del data['password_hash']
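
    # Accepted import forms (comment only, not executed), with hypothetical data:
    #   {'email': 'jane@example.com', 'password': '{SHA512-CRYPT}$6$...'}
    #   {'email': 'jane@example.com', 'hash_scheme': 'BLF-CRYPT', 'password_hash': '$2b$...'}
    # Either a password already prefixed with a known scheme, or a separate
    # hash_scheme/password_hash pair that is folded into the same format;
    # plain-text passwords are rejected here (set_password() is the API for hashing them).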

    domain = db.relationship(Domain,
        backref=db.backref('users', cascade='all, delete-orphan'))
    password = db.Column(db.String(255), nullable=False)
    quota_bytes = db.Column(db.BigInteger(), nullable=False, default=10**9)
    quota_bytes_used = db.Column(db.BigInteger(), nullable=False, default=0)
    global_admin = db.Column(db.Boolean(), nullable=False, default=False)
    enabled = db.Column(db.Boolean(), nullable=False, default=True)

    # Features
    enable_imap = db.Column(db.Boolean(), nullable=False, default=True)
    enable_pop = db.Column(db.Boolean(), nullable=False, default=True)

    # Filters
    forward_enabled = db.Column(db.Boolean(), nullable=False, default=False)
    forward_destination = db.Column(CommaSeparatedList(), nullable=True, default=[])
    forward_keep = db.Column(db.Boolean(), nullable=False, default=True)
    reply_enabled = db.Column(db.Boolean(), nullable=False, default=False)
    reply_subject = db.Column(db.String(255), nullable=True, default=None)
    reply_body = db.Column(db.Text(), nullable=True, default=None)
    reply_startdate = db.Column(db.Date, nullable=False,
        default=date(1900, 1, 1))
    reply_enddate = db.Column(db.Date, nullable=False,
        default=date(2999, 12, 31))

    # Settings
    displayed_name = db.Column(db.String(160), nullable=False, default="")
    spam_enabled = db.Column(db.Boolean(), nullable=False, default=True)
    spam_threshold = db.Column(db.Integer(), nullable=False, default=80)

    # Flask-login attributes
    is_authenticated = True
    is_active = True
    is_anonymous = False

    def get_id(self):
        return self.email

    @property
    def destination(self):
        if self.forward_enabled:
            result = list(self.forward_destination)
            if self.forward_keep:
                result.append(self.email)
            return ','.join(result)
        else:
            return self.email

    @property
    def reply_active(self):
        now = date.today()
        return (
            self.reply_enabled and
            self.reply_startdate < now and
            self.reply_enddate > now
        )

    scheme_dict = {'PBKDF2': "pbkdf2_sha512",
                   'BLF-CRYPT': "bcrypt",
                   'SHA512-CRYPT': "sha512_crypt",
                   'SHA256-CRYPT': "sha256_crypt",
                   'MD5-CRYPT': "md5_crypt",
                   'CRYPT': "des_crypt"}

    def get_password_context(self):
        return context.CryptContext(
            schemes=self.scheme_dict.values(),
            default=self.scheme_dict[app.config['PASSWORD_SCHEME']],
        )

    def check_password(self, password):
        context = self.get_password_context()
        reference = re.match('({[^}]+})?(.*)', self.password).group(2)
        result = context.verify(password, reference)
        if result and context.identify(reference) != context.default_scheme():
            self.set_password(password)
            db.session.add(self)
            db.session.commit()
        return result

    def set_password(self, password, hash_scheme=None, raw=False):
        """Set password for user with specified encryption scheme
           @password: plain text password to encrypt (if raw == True the hash itself)
        """
        if hash_scheme is None:
            hash_scheme = app.config['PASSWORD_SCHEME']
        # for the list of hash schemes see https://wiki2.dovecot.org/Authentication/PasswordSchemes
        if raw:
            self.password = '{' + hash_scheme + '}' + password
        else:
            self.password = '{' + hash_scheme + '}' + self.get_password_context().encrypt(password, self.scheme_dict[hash_scheme])
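
    # Storage format sketch (comment only, not executed): passwords are kept in
    # Dovecot style as '{SCHEME}hash', e.g. the hypothetical value
    #   '{BLF-CRYPT}$2b$12$...'
    # check_password() verifies against the part after the prefix and
    # transparently re-hashes the password when the stored scheme differs from
    # the configured default.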

    def get_managed_domains(self):
        if self.global_admin:
            return Domain.query.all()
        else:
            return self.manager_of

    def get_managed_emails(self, include_aliases=True):
        emails = []
        for domain in self.get_managed_domains():
            emails.extend(domain.users)
            if include_aliases:
                emails.extend(domain.aliases)
        return emails

    def send_welcome(self):
        if app.config["WELCOME"]:
            self.sendmail(app.config["WELCOME_SUBJECT"],
                app.config["WELCOME_BODY"])

    @classmethod
    def get(cls, email):
        return cls.query.get(email)

    @classmethod
    def login(cls, email, password):
        user = cls.query.get(email)
        return user if (user and user.enabled and user.check_password(password)) else None


class Alias(Base, Email):
    """ An alias is an email address that redirects to some destination.
    """

    __tablename__ = "alias"

    _dict_hide = {'domain_name', 'domain', 'localpart'}

    @staticmethod
    def _dict_input(data):
        Email._dict_input(data)
        # handle comma delimited string for backwards compatibility
        dst = data.get('destination')
        if type(dst) is str:
            data['destination'] = list([adr.strip() for adr in dst.split(',')])

    domain = db.relationship(Domain,
        backref=db.backref('aliases', cascade='all, delete-orphan'))
    wildcard = db.Column(db.Boolean(), nullable=False, default=False)
    destination = db.Column(CommaSeparatedList, nullable=False, default=[])

    @classmethod
    def resolve(cls, localpart, domain_name):
        alias_preserve_case = cls.query.filter(
            sqlalchemy.and_(cls.domain_name == domain_name,
                sqlalchemy.or_(
                    sqlalchemy.and_(
                        cls.wildcard == False,
                        cls.localpart == localpart
                    ), sqlalchemy.and_(
                        cls.wildcard == True,
                        sqlalchemy.bindparam("l", localpart).like(cls.localpart)
                    )
                )
            )
        ).order_by(cls.wildcard, sqlalchemy.func.char_length(cls.localpart).desc()).first()

        localpart_lower = localpart.lower() if localpart else None
        alias_lower_case = cls.query.filter(
            sqlalchemy.and_(cls.domain_name == domain_name,
                sqlalchemy.or_(
                    sqlalchemy.and_(
                        cls.wildcard == False,
                        sqlalchemy.func.lower(cls.localpart) == localpart_lower
                    ), sqlalchemy.and_(
                        cls.wildcard == True,
                        sqlalchemy.bindparam("l", localpart_lower).like(sqlalchemy.func.lower(cls.localpart))
                    )
                )
            )
        ).order_by(cls.wildcard, sqlalchemy.func.char_length(sqlalchemy.func.lower(cls.localpart)).desc()).first()

        if alias_preserve_case and alias_lower_case:
            if alias_preserve_case.wildcard:
                return alias_lower_case
            else:
                return alias_preserve_case
        elif alias_preserve_case and not alias_lower_case:
            return alias_preserve_case
        elif alias_lower_case and not alias_preserve_case:
            return alias_lower_case
        else:
            return None
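
    # Matching sketch (comment only, not executed), assuming hypothetical rows in
    # the 'alias' table for domain 'example.com': resolve('Support', 'example.com')
    # prefers an exact, case-preserving match ('Support@...'), then a
    # case-insensitive one ('support@...'), and only then the longest wildcard
    # alias (e.g. localpart 'supp%'); it returns the Alias row or None.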


class Token(Base):
    """ A token is an application password for a given user.
    """

    __tablename__ = "token"

    _dict_recurse = True
    _dict_hide = {'user', 'user_email'}
    _dict_mandatory = {'password'}

    id = db.Column(db.Integer(), primary_key=True)
    user_email = db.Column(db.String(255), db.ForeignKey(User.email),
        nullable=False)
    user = db.relationship(User,
        backref=db.backref('tokens', cascade='all, delete-orphan'))
    password = db.Column(db.String(255), nullable=False)
    ip = db.Column(db.String(255))

    def check_password(self, password):
        return hash.sha256_crypt.verify(password, self.password)

    def set_password(self, password):
        self.password = hash.sha256_crypt.using(rounds=1000).hash(password)

    def __str__(self):
        return self.comment or self.ip


class Fetch(Base):
    """ A fetched account is a remote POP/IMAP account fetched into a local
    account.
    """

    __tablename__ = "fetch"

    _dict_recurse = True
    _dict_hide = {'user_email', 'user', 'last_check', 'error'}
    _dict_mandatory = {'protocol', 'host', 'port', 'username', 'password'}
    _dict_secret = {'password'}

    id = db.Column(db.Integer(), primary_key=True)
    user_email = db.Column(db.String(255), db.ForeignKey(User.email),
        nullable=False)
    user = db.relationship(User,
        backref=db.backref('fetches', cascade='all, delete-orphan'))
    protocol = db.Column(db.Enum('imap', 'pop3'), nullable=False)
    host = db.Column(db.String(255), nullable=False)
    port = db.Column(db.Integer(), nullable=False)
    tls = db.Column(db.Boolean(), nullable=False, default=False)
    username = db.Column(db.String(255), nullable=False)
    password = db.Column(db.String(255), nullable=False)
    keep = db.Column(db.Boolean(), nullable=False, default=False)
    last_check = db.Column(db.DateTime, nullable=True)
    error = db.Column(db.String(1023), nullable=True)

    def __str__(self):
        return f'{self.protocol}{"s" if self.tls else ""}://{self.username}@{self.host}:{self.port}'
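
    # Example string form (comment only, not executed), for a hypothetical fetch
    # of 'jane' from 'imap.example.net' over TLS on port 993:
    #   str(fetch) -> 'imaps://jane@imap.example.net:993'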