package storage

import (
	"context"
	"database/sql"
	"encoding/json"
	"regexp"
	"strings"
	"time"

	"github.com/axllent/mailpit/utils/logger"
	"github.com/axllent/mailpit/utils/tools"
	"github.com/leporo/sqlf"
)

// Search will search a mailbox for search terms.
// The search is broken up into segments (exact phrases can be quoted) and interprets specific terms such as:
// is:read, is:unread, has:attachment, to:<term>, from:<term> & subject:<term>.
// Negative searches are also supported by prefixing the search term with a `-` or `!`.
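//
// For example, a query such as `from:alice is:unread -has:attachment "monthly report"`
// (purely illustrative values) roughly matches unread messages without attachments whose
// From address contains "alice" and whose indexed text contains the quoted phrase.
//
// A typical call site might look like this (the variable names are illustrative):
//
//	summaries, total, err := Search(`to:alice is:unread`, 0, 50)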
func Search(search string, start, limit int) ([]MessageSummary, int, error) {
	results := []MessageSummary{}
	allResults := []MessageSummary{}
	tsStart := time.Now()
	nrResults := 0

	if limit < 0 {
		limit = 50
	}

	q := searchParser(search)

	var err error

	if err := q.QueryAndClose(nil, db, func(row *sql.Rows) {
		var created int64
		var id string
		var messageID string
		var subject string
		var metadata string
		var size int
		var attachments int
		var tags string
		var read int
		var ignore string

		em := MessageSummary{}

		if err := row.Scan(&created, &id, &messageID, &subject, &metadata, &size, &attachments, &read, &tags, &ignore, &ignore, &ignore, &ignore); err != nil {
			logger.Log().Error(err)
			return
		}

		if err := json.Unmarshal([]byte(metadata), &em); err != nil {
			logger.Log().Error(err)
			return
		}

		if err := json.Unmarshal([]byte(tags), &em.Tags); err != nil {
			logger.Log().Error(err)
			return
		}

		em.Created = time.UnixMilli(created)
		em.ID = id
		em.MessageID = messageID
		em.Subject = subject
		em.Size = size
		em.Attachments = attachments
		em.Read = read == 1

		allResults = append(allResults, em)
	}); err != nil {
		return results, nrResults, err
	}

	dbLastAction = time.Now()

	nrResults = len(allResults)

	// paginate in memory: the query itself is not limited, so nrResults is always the total match count
	if nrResults > start {
		end := nrResults
		if nrResults >= start+limit {
			end = start + limit
		}

		results = allResults[start:end]
	}

	elapsed := time.Since(tsStart)

	logger.Log().Debugf("[db] search for \"%s\" in %s", search, elapsed)

	return results, nrResults, err
}

// DeleteSearch will delete all messages matching the search terms.
// The search is broken up into segments (exact phrases can be quoted) and interprets specific terms such as:
// is:read, is:unread, has:attachment, to:<term>, from:<term> & subject:<term>.
// Negative searches are also supported by prefixing the search term with a `-` or `!`.
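//
// Matching messages are removed from both the mailbox and mailbox_data tables inside a
// single transaction, with the IDs deleted in batches. For example (illustrative values):
//
//	if err := DeleteSearch(`from:alice is:read`); err != nil {
//		logger.Log().Error(err)
//	}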
func DeleteSearch(search string) error {
	q := searchParser(search)

	ids := []string{}

	if err := q.QueryAndClose(nil, db, func(row *sql.Rows) {
		var created int64
		var id string
		var messageID string
		var subject string
		var metadata string
		var size int
		var attachments int
		var tags string
		var read int
		var ignore string

		if err := row.Scan(&created, &id, &messageID, &subject, &metadata, &size, &attachments, &read, &tags, &ignore, &ignore, &ignore, &ignore); err != nil {
			logger.Log().Error(err)
			return
		}

		ids = append(ids, id)
	}); err != nil {
		return err
	}

	if len(ids) > 0 {
		total := len(ids)

		// split ids into chunks of 1000 ids
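		// (chunking presumably keeps each DELETE statement's IN (...) list, and therefore
		// its number of bound parameters, well within SQLite's per-statement limits)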
		var chunks [][]string
		if total > 1000 {
			chunkSize := 1000
			chunks = make([][]string, 0, (len(ids)+chunkSize-1)/chunkSize)
			for chunkSize < len(ids) {
				ids, chunks = ids[chunkSize:], append(chunks, ids[0:chunkSize:chunkSize])
			}

			if len(ids) > 0 {
				// add remaining ids <= 1000
				chunks = append(chunks, ids)
			}
		} else {
			chunks = append(chunks, ids)
		}
		// begin a transaction to ensure both the message
		// and data are deleted successfully
		tx, err := db.BeginTx(context.Background(), nil)
		if err != nil {
			return err
		}

		// roll back if it fails
		defer tx.Rollback()

		for _, ids := range chunks {
			delIDs := make([]interface{}, len(ids))
			for i, id := range ids {
				delIDs[i] = id
			}

			sqlDelete1 := `DELETE FROM mailbox WHERE ID IN (?` + strings.Repeat(",?", len(ids)-1) + `)`

			_, err = tx.Exec(sqlDelete1, delIDs...)
			if err != nil {
				return err
			}

			sqlDelete2 := `DELETE FROM mailbox_data WHERE ID IN (?` + strings.Repeat(",?", len(ids)-1) + `)`

			_, err = tx.Exec(sqlDelete2, delIDs...)
			if err != nil {
				return err
			}
		}

		err = tx.Commit()
		if err != nil {
			return err
		}

		logger.Log().Debugf("[db] deleted %d messages matching %s", total, search)

		dbLastAction = time.Now()
		dbDataDeleted = true

		BroadcastMailboxStats()
	}

	return nil
}

// searchParser returns the SQL syntax for the database search based on the search arguments
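//
// For example, a (purely illustrative) search string such as
//
//	to:alice is:unread -has:attachment report
//
// is translated into WHERE conditions roughly equivalent to
//
//	ToJSON LIKE '%alice%' AND Read = 0 AND Attachments = 0 AND SearchText LIKE '%report%'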
func searchParser(searchString string) *sqlf.Stmt {
	searchString = strings.ToLower(searchString)

	// group strings with quotes as a single argument and remove quotes
	args := tools.ArgsParser(searchString)

	q := sqlf.From("mailbox").
		Select(`Created, ID, MessageID, Subject, Metadata, Size, Attachments, Read, Tags,
			IFNULL(json_extract(Metadata, '$.To'), '{}') as ToJSON,
			IFNULL(json_extract(Metadata, '$.From'), '{}') as FromJSON,
			IFNULL(json_extract(Metadata, '$.Cc'), '{}') as CcJSON,
			IFNULL(json_extract(Metadata, '$.Bcc'), '{}') as BccJSON
		`).OrderBy("Created DESC")

	for _, w := range args {
		if cleanString(w) == "" {
			continue
		}

		exclude := false
		// search terms starting with a `-` or `!` imply an exclude
		if len(w) > 1 && (strings.HasPrefix(w, "-") || strings.HasPrefix(w, "!")) {
			exclude = true
			w = w[1:]
		}

		re := regexp.MustCompile(`[a-zA-Z0-9]+`)
		if !re.MatchString(w) {
			continue
		}

		if strings.HasPrefix(w, "to:") {
			w = cleanString(w[3:])
			if w != "" {
				if exclude {
					q.Where("ToJSON NOT LIKE ?", "%"+escPercentChar(w)+"%")
				} else {
					q.Where("ToJSON LIKE ?", "%"+escPercentChar(w)+"%")
				}
			}
		} else if strings.HasPrefix(w, "from:") {
			w = cleanString(w[5:])
			if w != "" {
				if exclude {
					q.Where("FromJSON NOT LIKE ?", "%"+escPercentChar(w)+"%")
				} else {
					q.Where("FromJSON LIKE ?", "%"+escPercentChar(w)+"%")
				}
			}
		} else if strings.HasPrefix(w, "cc:") {
			w = cleanString(w[3:])
			if w != "" {
				if exclude {
					q.Where("CcJSON NOT LIKE ?", "%"+escPercentChar(w)+"%")
				} else {
					q.Where("CcJSON LIKE ?", "%"+escPercentChar(w)+"%")
				}
			}
		} else if strings.HasPrefix(w, "bcc:") {
			w = cleanString(w[4:])
			if w != "" {
				if exclude {
					q.Where("BccJSON NOT LIKE ?", "%"+escPercentChar(w)+"%")
				} else {
					q.Where("BccJSON LIKE ?", "%"+escPercentChar(w)+"%")
				}
			}
		} else if strings.HasPrefix(w, "subject:") {
			// note: unlike the other field terms, the subject term is not normalized with cleanString()
			w = w[8:]
			if w != "" {
				if exclude {
					q.Where("Subject NOT LIKE ?", "%"+escPercentChar(w)+"%")
				} else {
					q.Where("Subject LIKE ?", "%"+escPercentChar(w)+"%")
				}
			}
		} else if strings.HasPrefix(w, "message-id:") {
			w = cleanString(w[11:])
			if w != "" {
				if exclude {
					q.Where("MessageID NOT LIKE ?", "%"+escPercentChar(w)+"%")
				} else {
					q.Where("MessageID LIKE ?", "%"+escPercentChar(w)+"%")
				}
			}
		} else if strings.HasPrefix(w, "tag:") {
			w = cleanString(w[4:])
			if w != "" {
				if exclude {
					q.Where("Tags NOT LIKE ?", "%\""+escPercentChar(w)+"\"%")
				} else {
					q.Where("Tags LIKE ?", "%\""+escPercentChar(w)+"\"%")
				}
			}
		} else if w == "is:read" {
			if exclude {
				q.Where("Read = 0")
			} else {
				q.Where("Read = 1")
			}
		} else if w == "is:unread" {
			if exclude {
				q.Where("Read = 1")
			} else {
				q.Where("Read = 0")
			}
		} else if w == "is:tagged" {
			if exclude {
				q.Where("Tags = ?", "[]")
			} else {
				q.Where("Tags != ?", "[]")
			}
		} else if w == "has:attachment" || w == "has:attachments" {
			if exclude {
				q.Where("Attachments = 0")
			} else {
				q.Where("Attachments > 0")
			}
		} else {
			// search text
			if exclude {
				q.Where("SearchText NOT LIKE ?", "%"+cleanString(escPercentChar(w))+"%")
			} else {
				q.Where("SearchText LIKE ?", "%"+cleanString(escPercentChar(w))+"%")
			}
		}
	}

	return q
}