package home

import (
	"fmt"
	"hash/crc32"
	"io/ioutil"
	"os"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/AdguardTeam/AdGuardHome/dnsfilter"
	"github.com/AdguardTeam/golibs/file"
	"github.com/AdguardTeam/golibs/log"
)

var (
	nextFilterID      = time.Now().Unix() // semi-stable way to generate a unique ID
	filterTitleRegexp = regexp.MustCompile(`^! Title: +(.*)$`)
	refreshStatus     uint32 // 0:none; 1:in progress
	refreshLock       sync.Mutex
)
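
// initFiltering prepares the filtering subsystem on startup: it loads the
// enabled filters from disk, removes duplicate filter entries, and advances
// the next filter ID past the IDs that are already in use.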
func initFiltering() {
	loadFilters()
	deduplicateFilters()
	updateUniqueFilterID(config.Filters)
}
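
// startFiltering launches the background task that periodically refreshes
// the filter lists.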
func startFiltering() {
	// Here we should start updating filters,
	// but currently we can't wake up the periodic task to do so.
	// So for now we just start this periodic task from here.
	go periodicallyRefreshFilters()
}
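
// defaultFilters returns the list of filters offered by default;
// only the first one is enabled out of the box.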
func defaultFilters() []filter {
	return []filter{
		{Filter: dnsfilter.Filter{ID: 1}, Enabled: true, URL: "https://adguardteam.github.io/AdGuardSDNSFilter/Filters/filter.txt", Name: "AdGuard Simplified Domain Names filter"},
		{Filter: dnsfilter.Filter{ID: 2}, Enabled: false, URL: "https://adaway.org/hosts.txt", Name: "AdAway"},
		{Filter: dnsfilter.Filter{ID: 3}, Enabled: false, URL: "https://hosts-file.net/ad_servers.txt", Name: "hpHosts - Ad and Tracking servers only"},
		{Filter: dnsfilter.Filter{ID: 4}, Enabled: false, URL: "https://www.malwaredomainlist.com/hostslist/hosts.txt", Name: "MalwareDomainList.com Hosts List"},
	}
}

// field ordering is important -- yaml fields will mirror ordering from here
type filter struct {
	Enabled     bool
	URL         string
	Name        string    `yaml:"name"`
	RulesCount  int       `yaml:"-"`
	LastUpdated time.Time `yaml:"-"`
	checksum    uint32    // checksum of the file data

	dnsfilter.Filter `yaml:",inline"`
}
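
// With these tags a filter entry in the YAML configuration looks roughly like
// the sketch below (for illustration only; the keys contributed by the inline
// dnsfilter.Filter fields depend on that struct's own tags):
//
//	- enabled: true
//	  url: https://adguardteam.github.io/AdGuardSDNSFilter/Filters/filter.txt
//	  name: AdGuard Simplified Domain Names filter
//	  id: 1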

// userFilter creates a helper filter object for working with the user rules
func userFilter() filter {
	f := filter{
		// User filter always has constant ID=0
		Enabled: true,
	}
	f.Filter.Data = []byte(strings.Join(config.UserRules, "\n"))
	return f
}
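
// Status flags returned by filterSetProperties(); they are OR'd together,
// except for statusURLExists, which is returned on its own.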
const (
	statusFound          = 1
	statusEnabledChanged = 2
	statusURLChanged     = 4
	statusURLExists      = 8
)

// Update properties for a filter specified by its URL
// Return status* flags.
func filterSetProperties(url string, newf filter) int {
	r := 0
	config.Lock()
	defer config.Unlock()

	for i := range config.Filters {
		f := &config.Filters[i]
		if f.URL != url {
			continue
		}

		log.Debug("filter: set properties: %s: {%s %s %v}",
			f.URL, newf.Name, newf.URL, newf.Enabled)
		f.Name = newf.Name

		if f.URL != newf.URL {
			r |= statusURLChanged
			if filterExistsNoLock(newf.URL) {
				return statusURLExists
			}
			f.URL = newf.URL
			f.unload()
			f.LastUpdated = time.Time{}
		}

		if f.Enabled != newf.Enabled {
			r |= statusEnabledChanged
			f.Enabled = newf.Enabled
			if f.Enabled {
				if (r & statusURLChanged) == 0 {
					e := f.load()
					if e != nil {
						// This isn't a fatal error,
						// because it may occur when someone removes the file from disk.
						// In this case the periodic update task will try to download the file.
						f.LastUpdated = time.Time{}
					}
				}
			} else {
				f.unload()
			}
		}

		return r | statusFound
	}
	return 0
}
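
// exampleFilterSetProperties is a hypothetical helper, given here only as a
// sketch of how a caller might interpret the status flags returned by
// filterSetProperties(); it is not used anywhere else.
func exampleFilterSetProperties(url string, newf filter) {
	st := filterSetProperties(url, newf)
	switch {
	case st == statusURLExists:
		log.Info("a filter with URL %s already exists", newf.URL)
	case (st & statusFound) == 0:
		log.Info("no filter with URL %s", url)
	case (st&statusEnabledChanged) != 0 || (st&statusURLChanged) != 0:
		// the set of active rules may have changed - reapply the filters
		enableFilters(true)
	}
}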

// Return TRUE if a filter with this URL exists
func filterExists(url string) bool {
	config.RLock()
	r := filterExistsNoLock(url)
	config.RUnlock()
	return r
}

// Return TRUE if a filter with this URL exists
// (the caller must already hold the config lock)
func filterExistsNoLock(url string) bool {
	r := false
	for i := range config.Filters {
		if config.Filters[i].URL == url {
			r = true
			break
		}
	}
	return r
}

// Add a filter
// Return FALSE if a filter with this URL exists
func filterAdd(f filter) bool {
	config.Lock()

	// Check for duplicates
	for i := range config.Filters {
		if config.Filters[i].URL == f.URL {
			config.Unlock()
			return false
		}
	}

	config.Filters = append(config.Filters, f)
	config.Unlock()
	return true
}

// Load filters from the disk
// And if any filter has zero ID, assign a new one
func loadFilters() {
	for i := range config.Filters {
		filter := &config.Filters[i] // otherwise we're operating on a copy
		if filter.ID == 0 {
			filter.ID = assignUniqueFilterID()
		}

		if !filter.Enabled {
			// No need to load a filter that is not enabled
			continue
		}

		err := filter.load()
		if err != nil {
			log.Error("Couldn't load filter %d contents due to %s", filter.ID, err)
		}
	}
}
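
// deduplicateFilters removes filter entries that share the same URL,
// keeping only the first occurrence of each URL.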
func deduplicateFilters() {
	// Deduplicate filters
	i := 0 // output index, used for deletion later
	urls := map[string]bool{}
	for _, filter := range config.Filters {
		if _, ok := urls[filter.URL]; !ok {
			// we didn't see it before, keep it
			urls[filter.URL] = true // remember the URL
			config.Filters[i] = filter
			i++
		}
	}

	// all entries we want to keep are at front, delete the rest
	config.Filters = config.Filters[:i]
}

// Set the next filter ID to max(filter.ID) + 1
func updateUniqueFilterID(filters []filter) {
	for _, filter := range filters {
		if nextFilterID < filter.ID {
			nextFilterID = filter.ID + 1
		}
	}
}
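
// assignUniqueFilterID returns the current next-ID value and increments it,
// so that every newly added filter gets its own ID.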
func assignUniqueFilterID() int64 {
	value := nextFilterID
	nextFilterID++
	return value
}

// Periodically check the filters for updates.
// If an update attempt fails with a network error, the retry interval grows
// exponentially, up to maxInterval.
func periodicallyRefreshFilters() {
	const maxInterval = 1 * 60 * 60 // 1 hour, in seconds
	intval := 5                     // use a dynamically increasing time interval
	nUpdated := 0
	for {
		isNetworkErr := false
		if config.DNS.FiltersUpdateIntervalHours != 0 && refreshStatus == 0 {
			refreshStatus = 1
			refreshLock.Lock()
			nUpdated, isNetworkErr = refreshFiltersIfNecessary(false)
			refreshLock.Unlock()
			refreshStatus = 0
			if nUpdated != 0 {
				intval = maxInterval
			}
		}

		if isNetworkErr {
			intval *= 2
			if intval > maxInterval {
				intval = maxInterval
			}
		}

		time.Sleep(time.Duration(intval) * time.Second)
	}
}

// Refresh filters immediately
// Return the number of updated filters, or an error if an update is already running
func refreshFilters() (int, error) {
	if refreshStatus != 0 { // we could use atomic cmpxchg here, but it's not really required
		return 0, fmt.Errorf("Filters update procedure is already running")
	}

	refreshStatus = 1
	refreshLock.Lock()
	nUpdated, _ := refreshFiltersIfNecessary(true)
	refreshLock.Unlock()
	refreshStatus = 0
	return nUpdated, nil
}

// Check the filters for updates if necessary
// If force is true, it ignores the filter.LastUpdated field value
//
// Algorithm:
// . Get the list of filters to be updated
// . For each filter run the download and checksum check operation
// . For each filter:
//   . If the filter data hasn't changed, just set a new update time on the file
//   . If the filter data has changed:
//     . rename the old file (1.txt -> 1.txt.old)
//     . store the new data on disk (1.txt)
// . Pass the new filters to the dnsfilter object - it analyzes the new data while the old filters are still active
// . dnsfilter activates the new filters
// . Remove the old filter files (1.txt.old)
//
// Return the number of updated filters
// Return TRUE if there was a network error and nothing could be updated
func refreshFiltersIfNecessary(force bool) (int, bool) {
	var updateFilters []filter
	var updateFlags []bool // 'true' if filter data has changed

	log.Debug("Filters: updating...")

	now := time.Now()
	config.RLock()
	for i := range config.Filters {
		f := &config.Filters[i] // otherwise we will be operating on a copy

		if !f.Enabled {
			continue
		}

		expireTime := f.LastUpdated.Unix() + int64(config.DNS.FiltersUpdateIntervalHours)*60*60
		if !force && expireTime > now.Unix() {
			continue
		}

		var uf filter
		uf.ID = f.ID
		uf.URL = f.URL
		uf.Name = f.Name
		uf.checksum = f.checksum
		updateFilters = append(updateFilters, uf)
	}
	config.RUnlock()

	nfail := 0
	for i := range updateFilters {
		uf := &updateFilters[i]
		updated, err := uf.update()
		updateFlags = append(updateFlags, updated)
		if err != nil {
			nfail++
			log.Printf("Failed to update filter %s: %s\n", uf.URL, err)
			continue
		}
		uf.LastUpdated = now
	}

	if nfail == len(updateFilters) {
		return 0, true
	}

	updateCount := 0
	for i := range updateFilters {
		uf := &updateFilters[i]
		updated := updateFlags[i]
		if updated {
			err := uf.saveAndBackupOld()
			if err != nil {
				log.Printf("Failed to save the updated filter %d: %s", uf.ID, err)
				continue
			}
		} else {
			e := os.Chtimes(uf.Path(), uf.LastUpdated, uf.LastUpdated)
			if e != nil {
				log.Error("os.Chtimes(): %v", e)
			}
		}

		config.Lock()
		for k := range config.Filters {
			f := &config.Filters[k]
			if f.ID != uf.ID || f.URL != uf.URL {
				continue
			}
			f.LastUpdated = uf.LastUpdated
			if !updated {
				continue
			}

			log.Info("Updated filter #%d. Rules: %d -> %d",
				f.ID, f.RulesCount, uf.RulesCount)
			f.Name = uf.Name
			f.Data = nil
			f.RulesCount = uf.RulesCount
			f.checksum = uf.checksum
			updateCount++
		}
		config.Unlock()
	}

	if updateCount != 0 {
		enableFilters(false)

		for i := range updateFilters {
			uf := &updateFilters[i]
			updated := updateFlags[i]
			if !updated {
				continue
			}
			_ = os.Remove(uf.Path() + ".old")
		}
	}

	log.Debug("Filters: update finished")
	return updateCount, false
}

// Allows printable text with CR, LF, TAB characters; any other control
// character makes the data non-printable
func isPrintableText(data []byte) bool {
	for _, c := range data {
		if (c >= ' ' && c != 0x7f) || c == '\n' || c == '\r' || c == '\t' {
			continue
		}
		return false
	}
	return true
}

// A helper function that parses filter contents and returns the number of rules and the filter name (if any)
func parseFilterContents(contents []byte) (int, string) {
	data := string(contents)
	rulesCount := 0
	name := ""
	seenTitle := false

	// Count lines in the filter
	for len(data) != 0 {
		line := SplitNext(&data, '\n')
		if len(line) == 0 {
			continue
		}

		if line[0] == '!' {
			m := filterTitleRegexp.FindAllStringSubmatch(line, -1)
			if len(m) > 0 && len(m[0]) >= 2 && !seenTitle {
				name = m[0][1]
				seenTitle = true
			}
		} else {
			rulesCount++
		}
	}

	return rulesCount, name
}

// Perform upgrade on a filter: download the data from the filter's URL
// and return TRUE if its contents have changed
func (filter *filter) update() (bool, error) {
	log.Tracef("Downloading update for filter %d from %s", filter.ID, filter.URL)

	resp, err := config.client.Get(filter.URL)
	if resp != nil && resp.Body != nil {
		defer resp.Body.Close()
	}
	if err != nil {
		log.Printf("Couldn't request filter from URL %s, skipping: %s", filter.URL, err)
		return false, err
	}

	if resp.StatusCode != 200 {
		log.Printf("Got status code %d from URL %s, skipping", resp.StatusCode, filter.URL)
		return false, fmt.Errorf("got status code != 200: %d", resp.StatusCode)
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Printf("Couldn't fetch filter contents from URL %s, skipping: %s", filter.URL, err)
		return false, err
	}

	// Check if the filter has really changed
	checksum := crc32.ChecksumIEEE(body)
	if filter.checksum == checksum {
		log.Tracef("Filter #%d at URL %s hasn't changed, not updating it", filter.ID, filter.URL)
		return false, nil
	}

	// Make sure the response looks like a plain-text filter list, not an HTML error page
	var firstChunk []byte
	if len(body) <= 4096 {
		firstChunk = body
	} else {
		firstChunk = body[:4096]
	}
	if !isPrintableText(firstChunk) {
		return false, fmt.Errorf("Data contains non-printable characters")
	}

	s := strings.ToLower(string(firstChunk))
	if strings.Contains(s, "<html") ||
		strings.Contains(s, "<!doctype") {
		return false, fmt.Errorf("Data is HTML, not plain text")
	}

	// Extract the filter name and count the number of rules
	rulesCount, filterName := parseFilterContents(body)
	log.Printf("Filter %d has been updated: %d bytes, %d rules", filter.ID, len(body), rulesCount)
	if filterName != "" {
		filter.Name = filterName
	}
	filter.RulesCount = rulesCount
	filter.Data = body
	filter.checksum = checksum

	return true, nil
}

// saves filter contents to the file in dataDir
// This method is safe to call during filters update,
// because it creates a new file and then renames it,
// so the currently opened file descriptors to the old filter file remain valid.
func (filter *filter) save() error {
	filterFilePath := filter.Path()
	log.Printf("Saving filter %d contents to: %s", filter.ID, filterFilePath)

	err := file.SafeWrite(filterFilePath, filter.Data)

	// update LastUpdated field after saving the file
	filter.LastUpdated = filter.LastTimeUpdated()
	return err
}
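
// saveAndBackupOld renames the existing filter file to *.txt.old
// (if it exists) and then writes the new filter data to *.txt.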
func (filter *filter) saveAndBackupOld() error {
	filterFilePath := filter.Path()
	err := os.Rename(filterFilePath, filterFilePath+".old")
	if err != nil && !os.IsNotExist(err) {
		return err
	}
	return filter.save()
}

// loads filter contents from the file in dataDir
func (filter *filter) load() error {
	filterFilePath := filter.Path()
	log.Tracef("Loading filter %d contents from: %s", filter.ID, filterFilePath)

	if _, err := os.Stat(filterFilePath); os.IsNotExist(err) {
		// the filter file doesn't exist, so there is nothing to load
		return err
	}

	filterFileContents, err := ioutil.ReadFile(filterFilePath)
	if err != nil {
		return err
	}

	log.Tracef("File %s, id %d, length %d", filterFilePath, filter.ID, len(filterFileContents))
	rulesCount, _ := parseFilterContents(filterFileContents)

	filter.RulesCount = rulesCount
	filter.Data = nil
	filter.checksum = crc32.ChecksumIEEE(filterFileContents)
	filter.LastUpdated = filter.LastTimeUpdated()

	return nil
}

// Clear filter rules
func (filter *filter) unload() {
	filter.Data = nil
	filter.RulesCount = 0
}

// Path to the filter contents
func (filter *filter) Path() string {
	return filepath.Join(config.getDataDir(), filterDir, strconv.FormatInt(filter.ID, 10)+".txt")
}

// LastTimeUpdated returns the time when the filter was last updated
func (filter *filter) LastTimeUpdated() time.Time {
	filterFilePath := filter.Path()
	s, err := os.Stat(filterFilePath)
	if os.IsNotExist(err) {
		// if the filter file does not exist, return 0001-01-01
		return time.Time{}
	}

	if err != nil {
		// on any other error, also return the zero time
		return time.Time{}
	}

	// filter file modified time
	return s.ModTime()
}
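
// enableFilters passes the current set of filters to the dnsfilter module:
// the user rules are passed as text, enabled filters as paths to their files;
// when filtering is disabled, no filters are passed at all.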
func enableFilters(async bool) {
	var filters map[int]string
	if config.DNS.FilteringEnabled {
		// convert array of filters
		filters = make(map[int]string)

		userFilter := userFilter()
		filters[int(userFilter.ID)] = string(userFilter.Data)

		for _, filter := range config.Filters {
			if !filter.Enabled {
				continue
			}
			filters[int(filter.ID)] = filter.Path()
		}
	}

	_ = Context.dnsFilter.SetFilters(filters, async)
}