package querylog

import (
	"fmt"
	"net"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"time"

	"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
	"github.com/AdguardTeam/golibs/jsonutil"
	"github.com/AdguardTeam/golibs/log"
	"github.com/AdguardTeam/golibs/stringutil"
	"github.com/AdguardTeam/golibs/timeutil"
	"golang.org/x/net/idna"
)

// qlogConfig is the query log configuration as used in the HTTP API.
type qlogConfig struct {
	// Interval is the querylog rotation interval in days.  Use float64 here
	// to support fractional numbers and not mess the API users by changing
	// the units.
	Interval          float64 `json:"interval"`
	Enabled           bool    `json:"enabled"`
	AnonymizeClientIP bool    `json:"anonymize_client_ip"`
}
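
// In its JSON form a qlogConfig looks like the following (the values here are
// only illustrative; the set of accepted intervals is validated by
// checkInterval):
//
//	{"enabled":true,"interval":1,"anonymize_client_ip":false}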

// initWeb registers the query log web handlers.
func (l *queryLog) initWeb() {
	l.conf.HTTPRegister(http.MethodGet, "/control/querylog", l.handleQueryLog)
	l.conf.HTTPRegister(http.MethodGet, "/control/querylog_info", l.handleQueryLogInfo)
	l.conf.HTTPRegister(http.MethodPost, "/control/querylog_clear", l.handleQueryLogClear)
	l.conf.HTTPRegister(http.MethodPost, "/control/querylog_config", l.handleQueryLogConfig)
}

func (l *queryLog) handleQueryLog(w http.ResponseWriter, r *http.Request) {
	params, err := l.parseSearchParams(r)
	if err != nil {
		aghhttp.Error(r, w, http.StatusBadRequest, "failed to parse params: %s", err)

		return
	}

	// Search for the log entries.
	entries, oldest := l.search(params)

	// Convert the log entries to JSON.
	data := l.entriesToJSON(entries, oldest)

	_ = aghhttp.WriteJSONResponse(w, r, data)
}

func (l *queryLog) handleQueryLogClear(_ http.ResponseWriter, _ *http.Request) {
	l.clear()
}

// handleQueryLogInfo responds with the current query log configuration.
func (l *queryLog) handleQueryLogInfo(w http.ResponseWriter, r *http.Request) {
	resp := qlogConfig{
		Enabled:           l.conf.Enabled,
		Interval:          l.conf.RotationIvl.Hours() / 24,
		AnonymizeClientIP: l.conf.AnonymizeClientIP,
	}

	_ = aghhttp.WriteJSONResponse(w, r, resp)
}

// AnonymizeIP masks ip to anonymize the client if the ip is a valid one.
func AnonymizeIP(ip net.IP) {
	// zeroes is a string of zero bytes from which the IP address tail is
	// copied.  Using a constant string as the source of copying is more
	// efficient than a byte slice, see
	// https://github.com/golang/go/issues/49997.
	const zeroes = "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"

	if ip4 := ip.To4(); ip4 != nil {
		copy(ip4[net.IPv4len-2:net.IPv4len], zeroes)
	} else if len(ip) == net.IPv6len {
		copy(ip[net.IPv6len-10:net.IPv6len], zeroes)
	}
}
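
// Note that AnonymizeIP zeroes the last 2 bytes of an IPv4 address and the
// last 10 bytes of a 16-byte IPv6 address, so, for example, 192.0.2.123
// becomes 192.0.0.0.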

// handleQueryLogConfig updates the query log configuration.
func (l *queryLog) handleQueryLogConfig(w http.ResponseWriter, r *http.Request) {
	d := &qlogConfig{}
	req, err := jsonutil.DecodeObject(d, r.Body)
	if err != nil {
		aghhttp.Error(r, w, http.StatusBadRequest, "%s", err)

		return
	}

	ivl := time.Duration(float64(timeutil.Day) * d.Interval)
	if req.Exists("interval") && !checkInterval(ivl) {
		aghhttp.Error(r, w, http.StatusBadRequest, "Unsupported interval")

		return
	}

	defer l.conf.ConfigModified()

	l.lock.Lock()
	defer l.lock.Unlock()

	// Copy the data, modify the copy, then activate it.  Other threads
	// (readers) don't need to use this lock.
	conf := *l.conf
	if req.Exists("enabled") {
		conf.Enabled = d.Enabled
	}
	if req.Exists("interval") {
		conf.RotationIvl = ivl
	}
	if req.Exists("anonymize_client_ip") {
		if conf.AnonymizeClientIP = d.AnonymizeClientIP; conf.AnonymizeClientIP {
			l.anonymizer.Store(AnonymizeIP)
		} else {
			l.anonymizer.Store(nil)
		}
	}
	l.conf = &conf
}

// getDoubleQuotesEnclosedValue checks if the value is enclosed in double
// quotes and, if it is, strips them in place: `"value"` -> `value`.  It
// returns true if the quotes were stripped.
func getDoubleQuotesEnclosedValue(s *string) bool {
	t := *s
	if len(t) >= 2 && t[0] == '"' && t[len(t)-1] == '"' {
		*s = t[1 : len(t)-1]
		return true
	}
	return false
}

// parseSearchCriterion parses a search criterion from the query parameter.
func (l *queryLog) parseSearchCriterion(q url.Values, name string, ct criterionType) (
	ok bool,
	sc searchCriterion,
	err error,
) {
	val := q.Get(name)
	if val == "" {
		return false, sc, nil
	}

	strict := getDoubleQuotesEnclosedValue(&val)

	var asciiVal string
	switch ct {
	case ctTerm:
		// Decode lowercased value from punycode to make EqualFold and
		// friends work properly with IDNAs.
		//
		// TODO(e.burkov):  Make it work with parts of IDNAs somehow.
		loweredVal := strings.ToLower(val)
		if asciiVal, err = idna.ToASCII(loweredVal); err != nil {
			log.Debug("can't convert %q to ascii: %s", val, err)
		} else if asciiVal == loweredVal {
			// Purge asciiVal to prevent checking the same value twice.
			asciiVal = ""
		}
	case ctFilteringStatus:
		if !stringutil.InSlice(filteringStatusValues, val) {
			return false, sc, fmt.Errorf("invalid value %s", val)
		}
	default:
		return false, sc, fmt.Errorf(
			"invalid criterion type %v: should be one of %v",
			ct,
			[]criterionType{ctTerm, ctFilteringStatus},
		)
	}

	sc = searchCriterion{
		criterionType: ct,
		value:         val,
		asciiVal:      asciiVal,
		strict:        strict,
	}

	return true, sc, nil
}
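
// A query log request may thus look like the following (the values are only
// illustrative; enclosing the search term in double quotes makes the match
// strict):
//
//	GET /control/querylog?older_than=2006-01-02T15:04:05Z&limit=20&search=%22example.org%22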

// parseSearchParams parses the search parameters from the HTTP request's
// query string.
func (l *queryLog) parseSearchParams(r *http.Request) (p *searchParams, err error) {
	p = newSearchParams()

	q := r.URL.Query()
	olderThan := q.Get("older_than")
	if len(olderThan) != 0 {
		p.olderThan, err = time.Parse(time.RFC3339Nano, olderThan)
		if err != nil {
			return nil, err
		}
	}

	var limit64 int64
	if limit64, err = strconv.ParseInt(q.Get("limit"), 10, 64); err == nil {
		p.limit = int(limit64)
	}

	var offset64 int64
	if offset64, err = strconv.ParseInt(q.Get("offset"), 10, 64); err == nil {
		p.offset = int(offset64)

		// If "offset" and "limit" are used instead of "older_than", change
		// the default behavior and scan all log records until enough entries
		// are found.
		p.maxFileScanEntries = 0
	}

	for _, v := range []struct {
		urlField string
		ct       criterionType
	}{{
		urlField: "search",
		ct:       ctTerm,
	}, {
		urlField: "response_status",
		ct:       ctFilteringStatus,
	}} {
		var ok bool
		var c searchCriterion
		ok, c, err = l.parseSearchCriterion(q, v.urlField, v.ct)
		if err != nil {
			return nil, err
		}

		if ok {
			p.searchCriteria = append(p.searchCriteria, c)
		}
	}

	return p, nil
}