@@ -37,6 +37,7 @@
 import hashlib
 import fnmatch
 import subprocess
+import types
 
 try:
     from io import BytesIO
@@ -279,31 +280,33 @@ def progress_hook(self, content_length, bytes_read):
         sys.stdout.write("\n")
         sys.stdout.flush()
 
-    def run(self):
-        url = self.get_rule_url()
+    def fetch(self, url):
+        logger.info("Fetching %s." % (url))
         tmp_filename = os.path.join(self.args.temp, os.path.basename(url))
         if not self.args.force and os.path.exists(tmp_filename):
             if time.time() - os.stat(tmp_filename).st_mtime < (60 * 15):
                 logger.info(
                     "Last download less than 15 minutes ago. Not fetching.")
-                return self.files_as_dict()
+                return self.files_as_dict(tmp_filename)
             if self.check_checksum(tmp_filename, url):
                 logger.info("Remote checksum has not changed. Not fetching.")
-                return self.files_as_dict()
+                return self.files_as_dict(tmp_filename)
         if not os.path.exists(self.args.temp):
             os.makedirs(self.args.temp)
-        logger.info("Fetching %s." % (url))
         idstools.net.get(
             url, open(tmp_filename, "wb"), progress_hook=self.progress_hook)
         logger.info("Done.")
-        return self.files_as_dict()
+        return self.files_as_dict(tmp_filename)
 
-    def basename(self):
-        return os.path.basename(self.get_rule_url())
+    def run(self):
+        urls = self.get_rule_url()
+        files = {}
+        for url in urls:
+            files.update(self.fetch(url))
+        return files
 
-    def files_as_dict(self):
-        files = idstools.util.archive_to_dict(
-            os.path.join(self.args.temp, self.basename()))
+    def files_as_dict(self, filename):
+        files = idstools.util.archive_to_dict(filename)
 
         # Erase path information.
         for key in files.keys():
@@ -626,7 +629,7 @@ def main():
                         help="Output merged rules file")
     parser.add_argument("--yaml-fragment", metavar="<filename>",
                         help="Output YAML fragment for rule inclusion")
-    parser.add_argument("--url", metavar="<url>",
+    parser.add_argument("--url", metavar="<url>", action="append",
                         help="URL to use instead of auto-generating one")
     parser.add_argument("--local", metavar="<filename>",
                         help="Local rule files or directories")
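
On the --url change: with argparse's action="append", every occurrence of --url is collected into a list, and args.url is None when the option is never given, so code consuming it has to handle both cases. A minimal, self-contained sketch of that behaviour (the URLs are placeholders):

    import argparse

    parser = argparse.ArgumentParser()
    # action="append" accumulates repeated --url options into a list.
    parser.add_argument("--url", metavar="<url>", action="append",
                        help="URL to use instead of auto-generating one")

    args = parser.parse_args([
        "--url", "http://rules.example.com/a.tar.gz",
        "--url", "http://rules.example.com/b.tar.gz",
    ])
    print(args.url)
    # -> ['http://rules.example.com/a.tar.gz', 'http://rules.example.com/b.tar.gz']
    # If --url is never passed, args.url is None (not an empty list).
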
@@ -655,6 +658,8 @@ def main():
                         help="Command to run after update if modified")
     args = parser.parse_args()
 
+    print(args)
+
     if args.verbose:
         logger.setLevel(logging.DEBUG)
     if args.quiet: