Diffstat (limited to 'src/lists.c')
-rw-r--r-- | src/lists.c | 153
1 file changed, 153 insertions, 0 deletions
diff --git a/src/lists.c b/src/lists.c
new file mode 100644
index 0000000..00e29ba
--- /dev/null
+++ b/src/lists.c
@@ -0,0 +1,153 @@
+/*
+Copyright (C) 2017 Paweł Redman
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 3
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software Foundation,
+Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+*/
+
+#include "shared.h"
+
+typedef enum {
+	ENTRY_REVDNS,
+	ENTRY_WHOIS
+} entry_type_t;
+
+typedef struct {
+	entry_type_t type;
+
+	char *pattern;
+	int niceness;
+
+	eli_header list;
+} entry_t;
+
+static entry_t *entry_list;
+static size_t total_entries;
+
+static int parse_entry(lexer_state_t *lexer, vstr_t *token, entry_type_t type)
+{
+	entry_t *entry;
+	int niceness;
+	char *pattern;
+
+	if (lexer_get_token(lexer)) {
+		lexer_perror_eg(lexer, "the niceness adjustment");
+		return 1;
+	}
+
+	niceness = atoi(vstr_to_cstr(token));
+
+	if (lexer_get_token(lexer)) {
+		lexer_perror_eg(lexer, "the pattern");
+		return 1;
+	}
+
+	pattern = vstr_strdup(token);
+	if (!pattern) {
+		lexer_perror(lexer, "out of memory\n");
+		return 1;
+	}
+
+	entry = malloc(sizeof(entry_t));
+	if (!entry) {
+		free(pattern);
+		lexer_perror(lexer, "out of memory\n");
+		return 1;
+	}
+
+	entry->type = type;
+	entry->niceness = niceness;
+	entry->pattern = pattern;
+	eli_append(&entry_list, entry, list);
+	total_entries++;
+
+	return 0;
+}
+
+// FIXME: Make this MT-safe (lock the list before adding shit to it)
+int lists_load(const char *file, size_t depth)
+{
+	int error = 1, rv;
+	lexer_state_t lexer;
+	vstr_t token = VSTR_INITIALIZER;
+
+	if (depth > 20) {
+		eprintf("%s: error: deep recursion (probably a circular dependency)\n", file);
+		return 1;
+	}
+
+	if ((rv = lexer_open(&lexer, file, &token))) {
+		perror(file);
+		return 1;
+	}
+
+	while (!lexer_get_token(&lexer)) {
+		if (!vstr_cmp(&token, "include")) {
+			if (lexer_get_token(&lexer)) {
+				lexer_perror_eg(&lexer, "the filename");
+				goto out;
+			}
+
+			// FIXME: relative paths
+			if (lists_load(vstr_to_cstr(&token), depth + 1)) {
+				lexer_perror(&lexer, "included from here\n");
+				goto out;
+			}
+		} else if (!vstr_cmp(&token, "revdns")) {
+			if (parse_entry(&lexer, &token, ENTRY_REVDNS))
+				goto out;
+		} else if (!vstr_cmp(&token, "whois")) {
+			if (parse_entry(&lexer, &token, ENTRY_WHOIS))
+				goto out;
+		} else {
+			lexer_perror_eg(&lexer, "'whois' or 'revdns'");
+			goto out;
+		}
+	}
+
+	if (depth == 0)
+		DEBUG("loaded entries: %zu\n", total_entries);
+
+	error = 0;
+out:
+	lexer_close(&lexer);
+	return error;
+}
+
+void lists_destroy(void)
+{
+	// TODO...
+}
+
+int lists_test(const char *revdns, const char *whois)
+{
+	entry_t *entry;
+	int total = 0;
+
+	eli_for(entry, entry_list, list) {
+		// TODO: regexps
+		if (entry->type == ENTRY_REVDNS &&
+		    !strstr(revdns, entry->pattern))
+			continue;
+
+		if (entry->type == ENTRY_WHOIS &&
+		    !strstr(whois, entry->pattern))
+			continue;
+
+		// TODO: avoid overflows
+		total += entry->niceness;
+	}
+
+	return total;
+}
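
For context, a minimal sketch of how a caller might use this module: load a list file once at startup, then score a peer by its reverse-DNS name and WHOIS text. The prototypes are assumed to come from shared.h (as in lists.c itself); the file name "lists.conf" and the example strings are made up for illustration, not taken from this repository.

	#include <stdio.h>
	#include "shared.h"	/* assumed to declare lists_load(), lists_test(), lists_destroy() */

	int main(void)
	{
		int score;

		/* depth starts at 0; lists_load() bumps it on each "include"
		   and bails out past 20 to catch circular includes */
		if (lists_load("lists.conf", 0))	/* hypothetical config path */
			return 1;

		/* sums the niceness of every entry whose pattern appears in the
		   matching string: revdns entries are checked against the first
		   argument, whois entries against the second */
		score = lists_test("host.example.isp.net", "OrgName: Example ISP");
		printf("niceness: %d\n", score);

		lists_destroy();	/* still a stub in this commit */
		return 0;
	}

Note that parse_entry() reads the niceness before the pattern, so a list file line would look like "revdns -10 dynamic.pool" in this scheme; since the value goes through atoi(), a non-numeric token silently becomes 0 rather than an error.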