querylog: export

commit 8813e135b6 (parent f4f2c11eb9)

2 changed files with 182 additions and 5 deletions

internal/querylog/csv.go (new file, 111 lines)

@@ -0,0 +1,111 @@
package querylog

import (
	"strconv"
	"strings"
	"time"

	"github.com/AdguardTeam/golibs/log"
	"github.com/miekg/dns"
)

// csvHeaderRow is a slice of strings with column names for the CSV header
// row.  It should correspond with the [logEntry.toCSV] method.
var csvHeaderRow = []string{
	"ans_dnssec",
	"ans_rcode",
	"ans_type",
	"ans_value",
	"cached",
	"client_ip",
	"client_id",
	"ecs",
	"elapsed",
	"filter_id",
	"filter_rule",
	"proto",
	"qclass",
	"qname",
	"qtype",
	"reason",
	"time",
	"upstream",
}

// toCSV returns a slice of strings with entry fields according to the
// csvHeaderRow slice.
func (e *logEntry) toCSV() (out []string) {
	var filterID, filterRule string

	if e.Result.IsFiltered && len(e.Result.Rules) > 0 {
		rule := e.Result.Rules[0]
		filterID = strconv.FormatInt(rule.FilterListID, 10)
		filterRule = rule.Text
	}

	aData := ansData(e)

	return []string{
		aData.dnsSec,
		aData.rCode,
		aData.typ,
		aData.value,
		strconv.FormatBool(e.Cached),
		e.IP.String(),
		e.ClientID,
		e.ReqECS,
		strconv.FormatFloat(e.Elapsed.Seconds()*1000, 'f', -1, 64),
		filterID,
		filterRule,
		string(e.ClientProto),
		e.QClass,
		e.QHost,
		e.QType,
		e.Result.Reason.String(),
		e.Time.Format(time.RFC3339Nano),
		e.Upstream,
	}
}

// csvAnswer is a helper struct for the answer fields of a CSV row.
type csvAnswer struct {
	dnsSec string
	rCode  string
	typ    string
	value  string
}

// ansData returns a csvAnswer with data extracted from the entry's answer
// message.
func ansData(entry *logEntry) (out csvAnswer) {
	if len(entry.Answer) == 0 {
		return out
	}

	msg := &dns.Msg{}
	if err := msg.Unpack(entry.Answer); err != nil {
		log.Debug("querylog: failed to unpack dns msg answer: %v: %s", entry.Answer, err)

		return out
	}

	out.rCode = dns.RcodeToString[msg.Rcode]

	// Old query logs may still keep the AD flag value in the message.  Try to
	// get it from there as well.
	out.dnsSec = strconv.FormatBool(entry.AuthenticatedData || msg.AuthenticatedData)

	if len(msg.Answer) == 0 {
		return out
	}

	rr := msg.Answer[0]
	header := rr.Header()

	out.typ = dns.TypeToString[header.Rrtype]

	// Remove the header string from the answer value since it's mostly
	// unnecessary in the log.
	out.value = strings.TrimPrefix(rr.String(), header.String())

	return out
}
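
For context, ansData relies on two behaviors of github.com/miekg/dns: the RcodeToString and TypeToString maps turn numeric codes into their text names, and an RR's String() output starts with its header's String() output, so a single strings.TrimPrefix is enough to isolate the record value.  A minimal standalone sketch of that trimming; it is not part of the commit, and the record literal is invented for illustration:

package main

import (
	"fmt"
	"strings"

	"github.com/miekg/dns"
)

func main() {
	// An invented record, only for illustration.
	rr, err := dns.NewRR("example.org. 10 IN A 127.0.0.1")
	if err != nil {
		panic(err)
	}

	hdr := rr.Header()

	// rr.String() is "example.org.\t10\tIN\tA\t127.0.0.1" and hdr.String() is
	// the same text without the value, so trimming the prefix leaves the value.
	fmt.Println("type: ", dns.TypeToString[hdr.Rrtype])
	fmt.Println("value:", strings.TrimPrefix(rr.String(), hdr.String()))
	// Prints:
	// type:  A
	// value: 127.0.0.1
}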

Second changed file (its name is not preserved in this capture; it holds the querylog HTTP handlers):

@@ -1,6 +1,7 @@
 package querylog

 import (
+	"encoding/csv"
 	"encoding/json"
 	"fmt"
 	"math"

@@ -14,6 +15,7 @@ import (
 	"github.com/AdguardTeam/AdGuardHome/internal/aghalg"
 	"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
 	"github.com/AdguardTeam/AdGuardHome/internal/aghnet"
+	"github.com/AdguardTeam/golibs/httphdr"
 	"github.com/AdguardTeam/golibs/log"
 	"github.com/AdguardTeam/golibs/stringutil"
 	"github.com/AdguardTeam/golibs/timeutil"

@@ -99,10 +101,63 @@ func (l *queryLog) handleQueryLog(w http.ResponseWriter, r *http.Request) {

 // handleQueryLogExport is the handler for the GET /control/querylog/export
 // HTTP API.
-//
-// TODO(d.kolyshev): !! Implement handleQueryLogExport.
 func (l *queryLog) handleQueryLogExport(w http.ResponseWriter, r *http.Request) {
-	aghhttp.Error(r, w, http.StatusBadRequest, "not implemented")
+	searchCriteria, err := parseSearchCriteria(r.URL.Query())
+	if err != nil {
+		aghhttp.Error(r, w, http.StatusBadRequest, "parsing params: %s", err)
+
+		return
+	}
+
+	params := &searchParams{
+		// TODO(a.meshkov): Consider making configurable.
+		limit:          500,
+		searchCriteria: searchCriteria,
+	}
+
+	w.Header().Set(httphdr.ContentType, "text/csv")
+	w.Header().Set(httphdr.ContentDisposition, "attachment;filename=data.csv")
+
+	csvWriter := csv.NewWriter(w)
+	defer func() {
+		if err = csvWriter.Error(); err != nil {
+			http.Error(w, "writing csv", http.StatusInternalServerError)
+		}
+	}()
+
+	// Write header.
+	if err = csvWriter.Write(csvHeaderRow); err != nil {
+		http.Error(w, "writing csv header", http.StatusInternalServerError)
+
+		return
+	}
+	csvWriter.Flush()
+
+	var entries []*logEntry
+	for {
+		func() {
+			l.confMu.RLock()
+			defer l.confMu.RUnlock()
+
+			entries, _ = l.search(params)
+		}()
+
+		if len(entries) == 0 {
+			break
+		}
+
+		params.offset += params.limit
+
+		for _, entry := range entries {
+			if err = csvWriter.Write(entry.toCSV()); err != nil {
+				http.Error(w, "writing csv record", http.StatusInternalServerError)
+
+				return
+			}
+		}
+
+		csvWriter.Flush()
+	}
 }

 // handleQueryLogClear is the handler for the POST /control/querylog/clear HTTP
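
The Flush and deferred Error calls in handleQueryLogExport follow the usual encoding/csv pattern: Write only buffers a record in memory, Flush pushes buffered records to the underlying writer (here the HTTP response), and Error reports any error seen during Write or Flush.  A minimal standalone sketch of that pattern; it is not part of the commit, and the two records are invented:

package main

import (
	"encoding/csv"
	"fmt"
	"os"
)

func main() {
	w := csv.NewWriter(os.Stdout)

	// Write only buffers; the invented records below reach os.Stdout on Flush.
	_ = w.Write([]string{"qname", "qtype"})
	_ = w.Write([]string{"example.org", "A"})

	w.Flush()

	// Error reports the first error seen by Write or Flush.
	if err := w.Error(); err != nil {
		fmt.Fprintln(os.Stderr, "writing csv:", err)
		os.Exit(1)
	}
}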

@@ -369,6 +424,17 @@ func parseSearchParams(r *http.Request) (p *searchParams, err error) {
 		p.maxFileScanEntries = 0
 	}

+	p.searchCriteria, err = parseSearchCriteria(q)
+	if err != nil {
+		// Don't wrap the error, because it's informative enough as is.
+		return nil, err
+	}
+
+	return p, nil
+}
+
+// parseSearchCriteria parses a list of search criteria from the query.
+func parseSearchCriteria(q url.Values) (searchCriteria []searchCriterion, err error) {
 	for _, v := range []struct {
 		urlField string
 		ct       criterionType

@@ -387,9 +453,9 @@ func parseSearchParams(r *http.Request) (p *searchParams, err error) {
 		}

 		if ok {
-			p.searchCriteria = append(p.searchCriteria, c)
+			searchCriteria = append(searchCriteria, c)
 		}
 	}

-	return p, nil
+	return searchCriteria, nil
 }
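
A rough consumer-side sketch of the new GET /control/querylog/export endpoint follows.  It is not part of the commit; the listen address, port, and absence of authentication are assumptions, since a real AdGuard Home instance normally expects a session cookie or basic auth on /control/ routes:

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// The address and port are assumptions; adjust them to the instance, and
	// add authentication (session cookie or basic auth) if it is enabled.
	resp, err := http.Get("http://127.0.0.1:3000/control/querylog/export")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, err := os.Create("data.csv")
	if err != nil {
		panic(err)
	}
	defer out.Close()

	// The handler streams CSV, so a plain copy is enough to save it.
	n, err := io.Copy(out, resp.Body)
	if err != nil {
		panic(err)
	}

	fmt.Printf("saved %d bytes to data.csv\n", n)
}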