2 # -*- coding: utf-8 -*-
4 # For the people of Smubworld!
17 if sys
.version_info
[0] >= 3:
19 import ipaddress
as ipaddr
20 from urllib
.request
import (urlopen
, Request
)
21 from urllib
.error
import URLError
24 import ConfigParser
as configparser
25 from urllib2
import (urlopen
, Request
, URLError
)
27 from embedded_ipaddr
import ipaddr
28 ipaddr
.ip_address
= ipaddr
.IPAddress
31 ipaddr
.ip_address
= ipaddr
.IPAddress
33 is_win32
= (sys
.platform
== "win32")
35 __program__
= 'blockfinder'
36 __url__
= 'https://github.com/ioerror/blockfinder/'
37 __author__
= 'Jacob Appelbaum <jacob@appelbaum.net>, David <db@d1b.org>'
38 __copyright__
= 'Copyright (c) 2010'
39 __license__
= 'See LICENSE for licensing information'
40 __version__
= '3.14159'
43 from future
import antigravity
def __init__(self, cache_dir, verbose=False):
    """ Remember the cache location and set up database bookkeeping.

    cache_dir -- directory holding all cached files and the sqlite db
    verbose   -- print extra progress information when True
    """
    self.cache_dir = cache_dir
    self.verbose = verbose
    # Connection state is established later by connect_to_database().
    # NOTE(review): these two initializers restored from fragmentary
    # source — verify against upstream.
    self.conn = None
    self.cursor = None
    # Bump whenever the on-disk schema changes; compared by
    # connect_to_database() against the value stored in db.cfg.
    self.db_version = "0.0.4"
    # NOTE(review): string concatenation (not a two-part join) is
    # preserved from the original — the db file ends up at
    # "<cache_dir>sqlitedb", so cache_dir is expected to end with a
    # path separator.
    self.db_path = os.path.join(self.cache_dir + "sqlitedb")
def erase_database(self):
    """ Remove the on-disk database file, if one exists. """
    db_file = self.db_path
    if os.path.exists(db_file):
        os.remove(db_file)
def connect_to_database(self):
    """ Connect to the database cache, possibly after creating it if
        it doesn't exist yet, or after making sure an existing
        database cache has the correct version. Return True if a
        connection could be established, False otherwise. """
    if not os.path.exists(self.cache_dir):
        if self.verbose:
            print("Initializing the cache directory...")
        os.mkdir(self.cache_dir)
    if os.path.exists(self.db_path):
        cache_version = self.get_db_version()
        if not cache_version:
            # Databases predating versioning are treated as 0.0.1.
            # NOTE(review): fallback restored from fragmentary source.
            cache_version = "0.0.1"
        if cache_version != self.db_version:
            print(("The existing database cache uses version %s, "
                   "not the expected %s." % (cache_version,
                                             self.db_version)))
            return False
    self.conn = sqlite3.connect(self.db_path)
    self.cursor = self.conn.cursor()
    self.create_assignments_table()
    self.create_asn_description_table()
    self.create_asn_assignments_table()
    return True
def __get_default_config_file_obj(self):
    """ Return an open file object for the default db config file,
        creating the file first when it does not exist yet. """
    # 'r+' keeps existing content readable and writable; fall back to
    # 'w+' to create the file on first use.
    # NOTE(review): open_flags handling restored from fragmentary source.
    open_flags = 'r+'
    file_path = os.path.join(self.cache_dir, 'db.cfg')
    if not os.path.exists(file_path):
        open_flags = 'w+'
    return open(file_path, open_flags)
def _get_db_config(self, file_obj=None):
    """ Return the database configuration object from the provided
        file_obj if provided, otherwise from the default database
        configuration file. """
    if file_obj is None:
        file_obj = self.__get_default_config_file_obj()
    # SafeConfigParser/readfp are the legacy (py2-era) spellings of
    # ConfigParser/read_file; kept for compatibility with the rest of
    # the file.
    config = configparser.SafeConfigParser()
    config.readfp(file_obj)
    # NOTE(review): close + return restored from fragmentary source.
    file_obj.close()
    return config
def set_db_version(self, file_obj=None):
    """ Set the database version string in the config file. """
    if file_obj is None:
        file_obj = self.__get_default_config_file_obj()
    config = self._get_db_config()
    if not config.has_section('db'):
        config.add_section('db')
    config.set('db', 'version', self.db_version)
    config.write(file_obj)
    # NOTE(review): trailing close restored from fragmentary source.
    file_obj.close()
def get_db_version(self):
    """ Read and return the database version string from the config
        file, or None when no version has been recorded yet. """
    config = self._get_db_config()
    if not config.has_section('db'):
        # NOTE(review): explicit None return restored from
        # fragmentary source.
        return None
    return config.get('db', 'version')
def commit_and_close_database(self):
    """ Commit any outstanding changes and close the database
        connection. """
    # NOTE(review): body restored from fragmentary source — verify
    # against upstream.
    self.conn.commit()
    self.conn.close()
def create_assignments_table(self):
    """ Create the assignments table that stores all assignments from
        IPv4/IPv6/ASN to country code. Blocks are stored as first hex
        of and first hex after the assignment. Numbers are stored
        as hex strings, because SQLite's INTEGER type only holds up to
        63 unsigned bits, which is not enough to store a /64 IPv6
        block. Hex strings have leading zeros, with IPv6 addresses
        being 33 hex characters long and IPv4 addresses and ASN being
        9 hex characters long. The first number after an assignment
        range is stored instead of the last number in the range to
        facilitate comparisons with neighboring ranges. """
    sql = ('CREATE TABLE IF NOT EXISTS assignments(start_hex TEXT, '
           'next_start_hex TEXT, num_type TEXT, country_code TEXT, '
           'source_type TEXT, source_name TEXT)')
    self.cursor.execute(sql)
    # NOTE(review): trailing commit restored from fragmentary source.
    self.conn.commit()
def create_asn_description_table(self):
    """ Create the assignments table that stores all the descriptions
        associated with ASNs. """
    sql = ('CREATE TABLE IF NOT EXISTS asn_descriptions(as_num INT, '
           'source_name TEXT, description TEXT)')
    self.cursor.execute(sql)
    # Index speeds up the join in fetch_org_by_ip_*.
    sql = ('CREATE INDEX IF NOT EXISTS DescriptionsByASN ON '
           'asn_descriptions ( as_num )')
    self.cursor.execute(sql)
    # NOTE(review): trailing commit restored from fragmentary source.
    self.conn.commit()
def create_asn_assignments_table(self):
    """ Create the assignments table that stores the assignments from
        netblocks to origin ASN. """
    # NOTE(review): docstring tail and PRIMARY KEY closing restored
    # from fragmentary source.
    # XXX: IPv6 not yet supported. (Not available from routeviews?)
    sql = ('CREATE TABLE IF NOT EXISTS asn_assignments(start_hex TEXT, '
           'next_start_hex TEXT, num_type TEXT, as_num INT, '
           'source_type TEXT, source_name TEXT, PRIMARY KEY(start_hex, '
           'next_start_hex))')
    self.cursor.execute(sql)
    sql = ('CREATE INDEX IF NOT EXISTS ASNEntriesByStartHex on '
           'asn_assignments ( start_hex )')
    self.cursor.execute(sql)
    self.conn.commit()
def delete_assignments(self, source_type):
    """ Delete all assignments from the database cache matching a
        given source type ("rir", "lir", etc.). """
    sql = 'DELETE FROM assignments WHERE source_type = ?'
    self.cursor.execute(sql, (source_type, ))
    # NOTE(review): trailing commit restored from fragmentary source.
    self.conn.commit()
def delete_asn_descriptions(self):
    """ Delete all asn descriptions from the database cache. """
    sql = 'DELETE FROM asn_descriptions'
    self.cursor.execute(sql)
    # NOTE(review): trailing commit restored from fragmentary source.
    self.conn.commit()
def delete_asn_assignments(self):
    """ Delete all the bgp netblock to as entries """
    sql = 'DELETE FROM asn_assignments'
    self.cursor.execute(sql)
    # NOTE(review): trailing commit restored from fragmentary source.
    self.conn.commit()
def insert_assignment(self, start_num, end_num, num_type,
                      country_code, source_type, source_name):
    """ Insert an assignment into the database cache, without
        commiting after the insertion. """
    sql = ('INSERT INTO assignments (start_hex, next_start_hex, '
           'num_type, country_code, source_type, source_name) '
           'VALUES (?, ?, ?, ?, ?, ?)')
    # Numbers are stored as fixed-width hex strings so that string
    # comparison in SQL matches numeric order (see table docstring).
    if num_type == 'ipv6':
        start_hex = '%033x' % start_num
        next_start_hex = '%033x' % (end_num + 1)
    else:
        # NOTE(review): `else:` restored from fragmentary source —
        # matches the symmetric branch in insert_asn_assignment().
        start_hex = '%09x' % start_num
        next_start_hex = '%09x' % (end_num + 1)
    # normalize_country_code is a module-level helper defined elsewhere
    # in this file.
    country_code = normalize_country_code(country_code)
    self.cursor.execute(sql, (start_hex, next_start_hex, num_type,
                              country_code, source_type, source_name))
def insert_asn_description(self, asn, source_name, description):
    """ Insert one ASN-to-description mapping, without committing. """
    sql = ('INSERT INTO asn_descriptions '
           '(as_num, source_name, description) '
           'VALUES (?, ?, ?)')
    # NOTE(review): VALUES clause restored from fragmentary source.
    # NOTE(review): `unicode` is the Python 2 builtin; on Python 3 this
    # relies on a compatibility alias defined at module top — confirm
    # the alias exists.
    self.cursor.execute(sql, (asn, source_name, unicode(description)))
def insert_asn_assignment(self, start_num, end_num, num_type, asn,
                          source_type, source_name):
    """ Insert one netblock-to-ASN assignment, without committing.
        Duplicate (start_hex, next_start_hex) pairs are silently
        ignored via the table's primary key. """
    # XXX: This is sqlite specific syntax
    sql = ('INSERT OR IGNORE INTO asn_assignments (start_hex, '
           'next_start_hex, num_type, as_num, source_type, source_name) '
           'VALUES (?, ?, ?, ?, ?, ?)')
    if num_type == 'ipv6':
        start_hex = '%033x' % start_num
        next_start_hex = '%033x' % (end_num + 1)
    else:
        # NOTE(review): `else:` restored from fragmentary source.
        start_hex = '%09x' % start_num
        next_start_hex = '%09x' % (end_num + 1)
    self.cursor.execute(sql, (start_hex, next_start_hex, num_type, asn,
                              source_type, source_name))
def commit_changes(self):
    """ Commit changes, e.g., after inserting assignments into the
        database cache. """
    # NOTE(review): body restored from fragmentary source.
    self.conn.commit()
def fetch_assignments(self, num_type, country_code):
    """ Fetch all assignments from the database cache matching the
        given number type ("asn", "ipv4", or "ipv6") and country code.
        The result is a sorted list of tuples containing (start_num,
        end_num). """
    sql = ('SELECT start_hex, next_start_hex FROM assignments '
           'WHERE num_type = ? AND country_code = ? '
           'ORDER BY start_hex')
    self.cursor.execute(sql, (num_type, country_code))
    # NOTE(review): result accumulator and return restored from
    # fragmentary source.  `long` is the py2 builtin; py3 relies on a
    # `long = int` alias at module top.
    result = []
    for row in self.cursor:
        # next_start_hex is the first number AFTER the range, hence -1.
        result.append((long(row[0], 16), long(row[1], 16) - 1))
    return result
def fetch_country_code(self, num_type, source_type, lookup_num):
    """ Fetch the country code from the database cache that is
        assigned to the given number (e.g., IPv4 address in decimal
        notation), number type (e.g., "ipv4"), and source type (e.g.,
        "rir").  Return None when no assignment covers the number. """
    sql = ('SELECT country_code FROM assignments WHERE num_type = ? '
           'AND source_type = ? AND start_hex <= ? '
           'AND next_start_hex > ?')
    if num_type == 'ipv6':
        lookup_hex = '%033x' % long(lookup_num)
    else:
        # NOTE(review): `else:` restored from fragmentary source.
        lookup_hex = '%09x' % long(lookup_num)
    self.cursor.execute(sql, (num_type, source_type, lookup_hex,
                              lookup_hex))
    row = self.cursor.fetchone()
    # NOTE(review): result handling restored from fragmentary source.
    if row:
        return row[0]
    return None
def fetch_country_blocks_in_other_sources(self, first_country_code):
    """ Fetch all assignments matching the given country code, then look
        up to which country code(s) the same number ranges are assigned in
        other source types. Return 8-tuples containing (1) first source
        type, (2) first and (3) last number of the assignment in the first
        source type, (4) second source type, (5) first and (6) last number
        of the assignment in the second source type, (7) country code in
        the second source type, and (8) number type. """
    sql = ('SELECT first.source_type, first.start_hex, '
           'first.next_start_hex, second.source_type, '
           'second.start_hex, second.next_start_hex, '
           'second.country_code, first.num_type '
           'FROM assignments AS first '
           'JOIN assignments AS second '
           'WHERE first.country_code = ? '
           'AND first.start_hex <= second.next_start_hex '
           'AND first.next_start_hex >= second.start_hex '
           'AND first.num_type = second.num_type '
           'ORDER BY first.source_type, first.start_hex, '
           'second.source_type, second.start_hex')
    self.cursor.execute(sql, (first_country_code, ))
    # NOTE(review): accumulator and return restored from fragmentary
    # source.
    result = []
    for row in self.cursor:
        result.append((str(row[0]), long(row[1], 16),
                       long(row[2], 16) - 1, str(row[3]), long(row[4], 16),
                       long(row[5], 16) - 1, str(row[6]), str(row[7])))
    return result
def fetch_org_by_ip_address(self, lookup_str, num_type):
    """ Return all (as_num, description, start_hex, next_start_hex)
        rows whose netblock covers the given address, or None when no
        row matches. """
    if num_type == 'ipv4':
        lookup_hex = '%09x' % long(int(lookup_str))
    else:
        # NOTE(review): `else:` restored from fragmentary source.
        lookup_hex = '%033x' % long(int(lookup_str))
    sql = ('SELECT asn_descriptions.as_num, asn_descriptions.description, '
           'asn_assignments.start_hex, asn_assignments.next_start_hex '
           'FROM asn_descriptions JOIN asn_assignments ON '
           'asn_assignments.as_num = asn_descriptions.as_num '
           'WHERE num_type = ? AND start_hex <= ? AND next_start_hex > ?')
    self.cursor.execute(sql, (num_type, lookup_hex, lookup_hex))
    row = self.cursor.fetchall()
    # NOTE(review): result handling restored from fragmentary source.
    if row:
        return row
    return None
def fetch_org_by_ip_range(self, lookup_start, lookup_end, num_type):
    """ Return all (as_num, description, start_hex, next_start_hex)
        rows fully contained in [lookup_start, lookup_end], or None
        when no row matches. """
    if num_type == 'ipv4':
        lookup_start_hex = '%09x' % long(int(lookup_start))
        lookup_end_hex = '%09x' % long(int(lookup_end))
    else:
        # NOTE(review): `else:` restored from fragmentary source.
        lookup_start_hex = '%033x' % long(int(lookup_start))
        lookup_end_hex = '%033x' % long(int(lookup_end))

    sql = ('SELECT asn_descriptions.as_num, asn_descriptions.description, '
           'asn_assignments.start_hex, asn_assignments.next_start_hex '
           'FROM asn_descriptions JOIN asn_assignments ON '
           'asn_assignments.as_num = asn_descriptions.as_num '
           'WHERE num_type = ? AND start_hex >= ? AND next_start_hex <= ?')
    self.cursor.execute(sql, (num_type, lookup_start_hex, lookup_end_hex))
    row = self.cursor.fetchall()
    # NOTE(review): result handling restored from fragmentary source.
    if row:
        return row
    return None
def _concatenate_and_write(
        self, records, write_function=None, record_filter=None, bits=32):
    """ Walk (start_hex, next_start_hex, record) rows, merge adjacent
        blocks that carry the same record value, and emit completed
        blocks through write_function(network, record).

    records       -- iterable of 3-column rows (hex strings + record)
    write_function-- callable invoked for each finished block
    record_filter -- optional callable applied to each record value
    bits          -- address width (32 for IPv4, 128 for IPv6)
    """
    # NOTE(review): loop scaffolding (try/except and branch keywords)
    # restored from fragmentary source — verify against upstream,
    # including whether a final pending block is flushed after the loop.
    netblocks = []
    for row in records:
        try:
            start_hex, next_start_hex, record = \
                long(row[0], 16), long(row[1], 16), str(row[2])
            # Prefix length from the block size (size is a power of 2).
            nb = bits - int(log(next_start_hex - start_hex, 2))
            net = ipaddr.IPNetwork("%s/%d" %
                                   (ipaddr.IPAddress(start_hex), nb))
            if callable(record_filter):
                record = record_filter(record)
        except ValueError:
            continue

        # Concatenate adjacent blocks of the same country
        if netblocks and netblocks[-1][1] == record:
            pn = netblocks[-1][0]
            nb = bits - int(log(int(net.network) +
                                int(net.numhosts) - int(pn.network), 2))
            netblocks[-1] = (ipaddr.IPNetwork("%s/%d" %
                                              (pn.network, nb)), record)

        # if the adjacent blocks aren't the same country,
        # write the last block out to csv and add the new block
        # to the list for possible concatenation
        elif netblocks:
            prev_n, prev_record = netblocks.pop()
            if write_function:
                write_function(prev_n, prev_record)
            netblocks.append((net, record))

        # this is the base case
        else:
            netblocks.append((net, record))
def export_asn(self, filename, num_type):
    """ Export assignments to the CSV format used to build the
        geoip-database asn lookup """
    sql = ('SELECT start_hex, next_start_hex, as_num '
           'FROM asn_assignments WHERE num_type = ? ORDER BY start_hex')
    self.cursor.execute(sql, (num_type,))
    try:
        f = open(filename, 'w')
    except IOError:
        print("Unable to open %s" % filename)
        return

    def write_csv_line(network, asn):
        # Fields: start addr, end addr, start int, end int, ASN label.
        # NOTE(review): second and fifth format arguments restored from
        # fragmentary source — verify field content against consumers.
        f.write(""""%s","%s","%d","%d","%s"\n""" % (network.network,
                                                    network.broadcast,
                                                    int(network.network),
                                                    int(network.broadcast),
                                                    str(asn)))

    if num_type == 'ipv6':
        ip_bits = 128
    elif num_type == 'ipv4':
        ip_bits = 32
    else:
        return

    self._concatenate_and_write(self.cursor, write_function=write_csv_line,
                                bits=ip_bits)
    f.close()
def export_geoip(self, lookup, filename, num_type):
    """ Export assignments to the CSV format used to build the
        geoip-database package """
    sql = ('SELECT start_hex, next_start_hex, country_code '
           'FROM assignments WHERE num_type = ? ORDER BY start_hex')
    self.cursor.execute(sql, (num_type,))
    try:
        f = open(filename, 'w')
    except IOError:
        print("Unable to open %s" % filename)
        return

    def write_csv_line(network, country_code):
        country_name = lookup.get_name_from_country_code(country_code)
        if country_name:
            country_name = country_name.split(
                "#")[0].strip()  # Drop comments
            # NOTE(review): address-format arguments restored from
            # fragmentary source — verify field order.
            f.write(""""%s","%s","%d","%d","%s","%s"\n""" % (
                    network.network,
                    network.broadcast,
                    int(network.network),
                    int(network.broadcast),
                    country_code,
                    country_name))

    if num_type == 'ipv6':
        ip_bits = 128
    elif num_type == 'ipv4':
        ip_bits = 32
    else:
        return

    # Country codes are upper-cased before comparison/merging.
    self._concatenate_and_write(self.cursor, write_function=write_csv_line,
                                record_filter=str.upper, bits=ip_bits)
    f.close()
class DownloaderParser:
    """ Downloads and parses the various remote data sources (MaxMind,
        RIR delegation files, LIR databases, ASN reports and BGP
        snapshots) into the local DatabaseCache. """

    def __init__(self, cache_dir, database_cache, user_agent,
                 verbose=False):
        # NOTE(review): wrapped `verbose=False` parameter restored from
        # fragmentary source.
        self.cache_dir = cache_dir
        self.database_cache = database_cache
        self.user_agent = user_agent
        self.verbose = verbose

    # Whitespace-separated URL lists; consumed via .split().
    # NOTE(review): the assignment lines/delimiters around these URL
    # constants were missing from the fragmentary source and have been
    # restored.
    MAXMIND_URLS = """
        http://geolite.maxmind.com/download/geoip/database/GeoIPCountryCSV.zip
        http://geolite.maxmind.com/download/geoip/database/GeoIPv6.csv.gz
    """

    RIR_URLS = """
        ftp://ftp.arin.net/pub/stats/arin/delegated-arin-extended-latest
        ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest
        ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest
        ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
        ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest
    """

    LIR_URLS = """
        ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
        ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz
    """

    COUNTRY_CODE_URL = ("http://www.iso.org/iso/home/standards/country_codes/"
                        "country_names_and_code_elements_txt-temp.htm")

    ASN_DESCRIPTION_URL = "http://www.cidr-report.org/as2.0/autnums.html"

    ASN_ASSIGNMENT_URLS = [
        ('http://archive.routeviews.org/oix-route-views/'
         'oix-full-snapshot-latest.dat.bz2'),
    ]
def download_maxmind_files(self):
    """ Download all MaxMind geolocation database urls. """
    # (Original docstring said "LIR delegation urls" — copy-paste from
    # download_lir_files; fixed.)
    for maxmind_url in self.MAXMIND_URLS.split():
        self._download_to_cache_dir(maxmind_url)
def download_rir_files(self):
    """ Download every RIR delegation file together with its md5
        checksum file. """
    for url in self.RIR_URLS.split():
        checksum_url = url + '.md5'
        self._download_to_cache_dir(url)
        self._download_to_cache_dir(checksum_url)
def download_lir_files(self):
    """ Download all LIR delegation urls. """
    for url in self.LIR_URLS.split():
        self._download_to_cache_dir(url)
def download_country_code_file(self):
    """ Download and save the latest semicolon-separated open country
        codes file. """
    # NOTE(review): docstring tail restored from fragmentary source.
    self._download_to_cache_dir(self.COUNTRY_CODE_URL)
def download_asn_description_file(self):
    """ Download and save the latest ASN to Name report from
        cidr-report.org. """
    # NOTE(review): docstring tail restored from fragmentary source.
    self._download_to_cache_dir(self.ASN_DESCRIPTION_URL)
def download_asn_assignment_files(self):
    """ Download and save the latest routing snapshots. """
    for url in self.ASN_ASSIGNMENT_URLS:
        self._download_to_cache_dir(url)
def _download_to_cache_dir(self, url):
    """ Fetch a resource (with progress bar) and store contents to the
        local cache directory under the file name given in the URL. """
    # NOTE(review): several control-flow lines in this method were
    # missing from the fragmentary source and have been restored —
    # verify against upstream.
    if not os.path.exists(self.cache_dir):
        if self.verbose:
            print("Initializing the cache directory...")
        os.mkdir(self.cache_dir)
    filename = url.split('/')[-1]
    if self.verbose:
        print("Fetching " + url)
    req = Request(url)
    req.add_header('User-Agent', self.user_agent)
    # TODO Allow use of a proxy.
    # req.set_proxy(host, type)
    try:
        fetcher = urlopen(req)
    except URLError as err:
        msg = "An error occurred while attempting to cache file from:"
        print(("%s\n\t%s\n\t%s" % (msg, url, str(err))))
        return
    # Content-Length may be absent; -1 disables the progress bar and
    # the size sanity check below.
    length_header = fetcher.headers.get("Content-Length")
    expected_bytes = -1
    if length_header:
        expected_bytes = int(length_header)
        print(("Fetching %d kilobytes" %
               round(float(expected_bytes / 1024), 2)))
    download_started = time.time()
    output_file = open(os.path.join(self.cache_dir, filename), "wb")
    received_bytes, seconds_elapsed = 0, 0
    while True:
        seconds_elapsed = time.time() - download_started
        if expected_bytes >= 0:
            self._update_progress_bar(received_bytes, expected_bytes,
                                      seconds_elapsed)
        chunk = fetcher.read(1024)
        if len(chunk) == 0:
            if expected_bytes >= 0 and received_bytes != expected_bytes:
                print(("Expected %s bytes, only received %s" %
                       (expected_bytes, received_bytes)))
            print("")
            break
        received_bytes += len(chunk)
        output_file.write(chunk)
    output_file.close()
def _update_progress_bar(self, received_bytes, expected_bytes,
                         seconds_elapsed):
    """ Write a progress bar to the console. """
    # NOTE(review): win32/tty branch keywords and EOL values restored
    # from fragmentary source — verify against upstream.
    if is_win32:
        rows = 100  # use some WinCon function for these?
        columns = 80  # but not really important.
        EOL = "\r"
    else:
        # Query the terminal size via stty on POSIX systems.
        rows, columns = list(map(int, os.popen('stty size', 'r'
                                               ).read().split()))
        EOL = "\x1b[G"
    if seconds_elapsed == 0:
        # Avoid division by zero in the rate caption.
        seconds_elapsed = 1
    percent_done = float(received_bytes) / float(expected_bytes)
    caption = "%.2f K/s" % (received_bytes / 1024 / seconds_elapsed)
    width = columns - 4 - len(caption)
    sys.stdout.write("[%s>%s] %s%s" % (
        "=" * int(percent_done * width),
        "." * (width - int(percent_done * width)), caption, EOL))
    sys.stdout.flush()
def check_rir_file_mtimes(self):
    """ Return True if the mtime of any RIR file in our cache directory
        is > 24 hours, False otherwise. """
    # NOTE(review): the three return statements restored from
    # fragmentary source.
    if not os.path.exists(self.cache_dir):
        return False
    for rir_url in self.RIR_URLS.split():
        rir_path = os.path.join(self.cache_dir,
                                rir_url.split('/')[-1])
        if os.path.exists(rir_path):
            rir_stat = os.stat(rir_path)
            # 86400 seconds == 24 hours.
            if (time.time() - rir_stat.st_mtime) > 86400:
                return True
    return False
def verify_rir_files(self):
    """ Compute md5 checksums of all RIR files, compare them to the
        provided .md5 files, and report any mismatch.

    NOTE(review): the original docstring promises a True/False return,
    but no return statement is visible in the source — as reconstructed
    the method only prints mismatches and returns None.  Confirm
    against upstream and callers. """
    for rir_url in self.RIR_URLS.split():
        rir_path = os.path.join(self.cache_dir,
                                rir_url.split('/')[-1])
        rir_md5_path = os.path.join(self.cache_dir,
                                    rir_url.split('/')[-1] + '.md5')
        # Nothing to verify when either file is missing.
        if not os.path.exists(rir_md5_path) or \
                not os.path.exists(rir_path):
            continue
        rir_md5_file = open(rir_md5_path, 'r')
        expected_checksum = rir_md5_file.read()
        rir_md5_file.close()
        # Checksum files come in several formats: "... = <hex>",
        # empty, or free text containing a bare 32-char hex digest.
        if "=" in expected_checksum:
            expected_checksum = expected_checksum.split("=")[-1].strip()
        elif expected_checksum == "":
            if self.verbose:
                print("No checksum... skipping verification...")
            continue
        else:
            regex = re.compile("[a-f0-9]{32}")
            regres = regex.findall(expected_checksum)
            if len(regres) > 1:
                print("Error: mutiple checksum found")
            elif len(regres) < 1:
                print("Error: no checksum found")
            else:
                expected_checksum = regres[0]
        computed_checksum = ""
        rir_file = open(rir_path, 'rb')
        rir_data = rir_file.read()
        rir_file.close()
        computed_checksum = str(hashlib.md5(rir_data).hexdigest())
        if expected_checksum != computed_checksum:
            print(("The computed md5 checksum of %s, %s, does *not* "
                   "match the provided checksum %s!" %
                   (rir_path, computed_checksum, expected_checksum)))
def parse_maxmind_files(self, maxmind_urls=None):
    """ Parse locally cached MaxMind files and insert assignments to the
        local database cache, overwriting any existing MaxMind
        assignments. """
    if not maxmind_urls:
        maxmind_urls = self.MAXMIND_URLS.split()
    self.database_cache.delete_assignments('maxmind')
    for maxmind_url in maxmind_urls:
        maxmind_path = os.path.join(self.cache_dir,
                                    maxmind_url.split('/')[-1])
        if not os.path.exists(maxmind_path):
            print("Unable to find %s." % maxmind_path)
            # NOTE(review): `continue` restored from fragmentary source.
            continue
        if maxmind_path.endswith('.zip'):
            maxmind_zip_path = zipfile.ZipFile(maxmind_path)
            for contained_filename in maxmind_zip_path.namelist():
                content = maxmind_zip_path.read(contained_filename)
                self._parse_maxmind_content(content, 'maxmind',
                                            'maxmind')
        elif maxmind_path.endswith('.gz'):
            content = gzip.open(maxmind_path).read()
            self._parse_maxmind_content(content, 'maxmind', 'maxmind')
    self.database_cache.commit_changes()
def import_maxmind_file(self, maxmind_path):
    """ Import a single local MaxMind CSV file into the database cache,
        using the file path itself as source type and name. """
    self.database_cache.delete_assignments(maxmind_path)
    if not os.path.exists(maxmind_path):
        print("Unable to find %s." % maxmind_path)
        # NOTE(review): early return restored from fragmentary source.
        return
    # NOTE(review): text-mode read preserved from the original even
    # though _parse_maxmind_content decodes the content as bytes on
    # Python 3 — confirm against upstream.
    content = open(maxmind_path).read()
    self._parse_maxmind_content(content, maxmind_path, maxmind_path)
    self.database_cache.commit_changes()
def _parse_maxmind_content(self, content, source_type, source_name):
    """ Parse the byte content of a MaxMind CSV file and insert each
        row as an assignment (without committing). """
    # NOTE(review): `continue`, num_type branches and insert arguments
    # restored from fragmentary source — verify against upstream.
    keys = ['start_str', 'end_str', 'start_num', 'end_num',
            'country_code', 'country_name']
    for line in content.decode('utf-8').split('\n'):
        # Skip blank lines and comments.
        if len(line.strip()) == 0 or line.startswith("#"):
            continue
        line = line.replace('"', '').replace(' ', '').strip()
        parts = line.split(',')
        entry = dict((k, v) for k, v in zip(keys, parts))
        start_num = int(entry['start_num'])
        end_num = int(entry['end_num'])
        country_code = str(entry['country_code'])
        start_ipaddr = ipaddr.ip_address(entry['start_str'])
        if isinstance(start_ipaddr, ipaddr.IPv4Address):
            num_type = 'ipv4'
        else:
            num_type = 'ipv6'
        self.database_cache.insert_assignment(
            start_num, end_num, num_type,
            country_code, source_type, source_name)
def parse_rir_files(self, rir_urls=None):
    """ Parse locally cached RIR files and insert assignments to the local
        database cache, overwriting any existing RIR assignments. """
    if not rir_urls:
        rir_urls = self.RIR_URLS.split()
    self.database_cache.delete_assignments('rir')
    # Field names of the pipe-separated RIR delegation format.
    keys = "registry country_code type start value date status"
    for rir_url in rir_urls:
        rir_path = os.path.join(self.cache_dir,
                                rir_url.split('/')[-1])
        if not os.path.exists(rir_path):
            print("Unable to find %s." % rir_path)
            continue
        for line in open(rir_path, 'r'):
            if line.startswith("#"):
                continue
            entry = dict((k, v) for k, v in
                         zip(keys.split(), line.strip().split("|")))
            source_name = str(entry['registry'])
            country_code = str(entry['country_code'])
            # Skip the version/summary header rows and wildcard rows.
            if source_name.replace(
                    ".", "", 1).isdigit() or country_code == "*":
                continue
            num_type = entry['type']
            if num_type == 'asn':
                start_num = end_num = int(entry['start'])
            elif num_type == 'ipv4':
                # "value" is the block size for IPv4.
                start_num = int(ipaddr.IPv4Address(entry['start']))
                end_num = start_num + int(entry['value']) - 1
            elif num_type == 'ipv6':
                # "value" is the prefix length for IPv6.
                network_str = entry['start'] + '/' + entry['value']
                network_ipaddr = ipaddr.IPv6Network(network_str)
                start_num = int(network_ipaddr.network_address)
                end_num = int(network_ipaddr.broadcast_address)
            # NOTE(review): insert arguments restored from fragmentary
            # source.
            self.database_cache.insert_assignment(
                start_num, end_num, num_type,
                country_code, 'rir', source_name)
    self.database_cache.commit_changes()
def parse_lir_files(self, lir_urls=None):
    """ Parse locally cached LIR files and insert assignments to the local
        database cache, overwriting any existing LIR assignments. """
    # NOTE(review): significant scaffolding in this method (state
    # initialisation, try/except blocks, insert arguments) restored
    # from fragmentary source — verify against upstream.
    if not lir_urls:
        lir_urls = self.LIR_URLS.split()
    self.database_cache.delete_assignments('lir')
    for lir_url in lir_urls:
        lir_path = os.path.join(self.cache_dir,
                                lir_url.split('/')[-1])
        if not os.path.exists(lir_path):
            print("Unable to find %s." % lir_path)
            continue
        if lir_path.endswith('.gz'):
            lir_file = gzip.open(lir_path)
        else:
            lir_file = open(lir_path)
        # Per-object parser state: an "entry" is open once an
        # inetnum/inet6num line was seen and closes on its country line.
        start_num, end_num, country_code, num_type = 0, 0, "", ""
        entry = False
        for line in lir_file:
            line = line.decode('utf-8', 'ignore').replace("\n", "")
            if line == "":
                entry = False
                start_num, end_num, country_code, num_type = 0, 0, "", ""
            elif not entry and "inetnum:" in line:
                try:
                    line = line.replace("inetnum:", "").strip()
                    start_str = line.split("-")[0].strip()
                    end_str = line.split("-")[1].strip()
                    start_num = int(ipaddr.IPv4Address(start_str))
                    end_num = int(ipaddr.IPv4Address(end_str))
                    entry = True
                    num_type = 'ipv4'
                except Exception as e:
                    if self.verbose:
                        print(repr(e), line)
            elif not entry and "inet6num:" in line:
                try:
                    network_str = line.replace("inet6num:", "").strip()
                    network_ipaddr = ipaddr.IPv6Network(network_str)
                    start_num = int(network_ipaddr.network_address)
                    end_num = int(network_ipaddr.broadcast_address)
                    entry = True
                    num_type = 'ipv6'
                except Exception as e:
                    if self.verbose:
                        print(repr(e), line)
            elif entry and "country:" in line:
                country_code = line.replace("country:", "").strip()
                self.database_cache.insert_assignment(
                    start_num, end_num, num_type,
                    country_code, 'lir', 'ripencc')
                # Close the entry so repeated country lines do not
                # insert duplicates.
                entry = False
        self.database_cache.commit_changes()
def parse_asn_description_file(self, asn_description_url=None):
    """ Parse locally cached ASN to Description mappings and insert
        mappings to the local database cache, overwriting any existing ASN
        to Name assignments. """
    if not asn_description_url:
        asn_description_url = self.ASN_DESCRIPTION_URL
    self.database_cache.delete_asn_descriptions()
    asn_description_path = os.path.join(self.cache_dir,
                                        asn_description_url.split('/')[-1])
    asn_descriptions = open(asn_description_path)
    source_name = 'cidr_report'
    # Every data line starts with this anchor; slicing it off leaves
    # "<asn>&view=2.0">AS<asn> </a> <description>".
    skiplen = len('<a href="/cgi-bin/as-report?as=AS')
    for line in asn_descriptions:
        # NOTE(review): try/except scaffolding restored from
        # fragmentary source; non-matching lines are skipped.
        try:
            asn, _name = line[skiplen:].split('&view=2.0')
            description = _name.split('</a>')[1].strip()
            self.database_cache.insert_asn_description(asn, source_name,
                                                       description)
        except ValueError:
            pass
    self.database_cache.commit_changes()
    asn_descriptions.close()
# Parse locally cached BGP routing snapshots (routeviews .bz2 dumps) and
# load netblock-to-origin-ASN mappings into the database cache.
# NOTE(review): this region of the file is fragmentary (several original
# lines are missing); the comments below annotate only the visible
# statements and must be re-checked against a complete copy.
805 def parse_asn_assignment_files(self
, asn_assignment_urls
=None):
# Default to the class-level snapshot URL list.
806 if not asn_assignment_urls
:
807 asn_assignment_urls
= self
.ASN_ASSIGNMENT_URLS
# Drop any previously imported ASN assignments before re-importing.
808 self
.database_cache
.delete_asn_assignments()
809 for asn_assignment_url
in asn_assignment_urls
:
# Cached file lives under cache_dir, named after the URL's last segment.
810 asn_assignment_path
= os
.path
.join(
812 asn_assignment_url
.split('/')[-1])
813 if not os
.path
.exists(asn_assignment_path
):
814 print("Unable to find %s." % asn_assignment_path
)
# Snapshots are bzip2-compressed table dumps.
816 if asn_assignment_path
.endswith('.bz2'):
817 b
= bz2
.BZ2File(asn_assignment_path
)
# Only lines starting with "*" carry route entries.
819 if line
.startswith("*"):
# Column 1 is the netblock; l[6:-1] is presumably the AS path —
# TODO confirm column layout against the snapshot format.
821 netblock
, path
= l
[1], l
[6:-1]
822 nexthop
, metric
, locprf
, weight
= l
[
825 network
= ipaddr
.IPNetwork(netblock
)
826 # XXX add support for other sources too
827 source_type
= 'bgp_snapshot'
828 source_name
= 'routeviews'
830 if isinstance(network
, ipaddr
.IPv4Network
):
# Store the block as integer bounds plus the originating ASN.
835 self
.database_cache
.insert_asn_assignment(
836 int(network
.network
),
837 int(network
.broadcast
),
def __init__(self, cache_dir, database_cache, verbose=False):
    """ Remember the cache location and eagerly build the country-name
        lookup table from the cached ISO country codes file. """
    self.cache_dir = cache_dir
    self.database_cache = database_cache
    self.verbose = verbose
    # NOTE(review): map initialisation restored from fragmentary
    # source; stays None when the country codes file is missing (see
    # knows_country_names()).
    self.map_co = None
    self.build_country_code_dictionary()
def build_country_code_dictionary(self):
    """ Build self.map_co, a dictionary mapping country name to the
        country code, from the cached ISO semicolon-separated file.
        Leaves self.map_co untouched when the file is missing. """
    country_code_path = os.path.join(
        self.cache_dir,
        'country_names_and_code_elements_txt-temp.htm')
    if not os.path.exists(country_code_path):
        # NOTE(review): early return and map initialisation restored
        # from fragmentary source.
        return
    self.map_co = {}
    for line in open(country_code_path):
        # Skip blanks, the header row, and any non-record line.
        if line == "" or line.startswith("Country ") or ";" not in line:
            continue
        country_name, country_code = line.strip().split(";")
        # "UNITED STATES" -> "United States".
        country_name = ' '.join([part.capitalize() for part in
                                 country_name.split(" ")])
        self.map_co[country_name] = country_code
def knows_country_names(self):
    """ True when the country-name map was successfully built. """
    return not (self.map_co is None)
def get_name_from_country_code(self, cc_code):
    """ Return the first country name mapped to the given country
        code, or None when unknown or when no map is loaded. """
    if not self.knows_country_names():
        # NOTE(review): early return restored from fragmentary source.
        return None
    country_name = [(key, value) for (key, value) in
                    list(self.map_co.items()) if value == cc_code]
    if len(country_name) > 0:
        return country_name[0][0]
    return None
def get_country_code_from_name(self, country_name):
    """ Return the country code for a given country name. """
    if not self.knows_country_names():
        # NOTE(review): early return restored from fragmentary source.
        return None
    # Prefix match, case-insensitive, first hit wins.
    cc_code = [self.map_co[key] for key in list(self.map_co.keys()) if
               key.upper().startswith(country_name.upper())]
    if len(cc_code) > 0:
        return cc_code[0]
    return None
def lookup_ipv6_address(self, lookup_ipaddr):
    """ Print the country code/name for an IPv6 address from each
        source type.  Output-only; returns None. """
    print("Reverse lookup for: " + str(lookup_ipaddr))
    for source_type in ['maxmind', 'rir', 'lir']:
        # NOTE(review): call arguments and the two guards restored
        # from fragmentary source.
        cc = self.database_cache.fetch_country_code(
            'ipv6', source_type, int(lookup_ipaddr))
        if cc:
            print(source_type.upper(), "country code:", cc)
            cn = self.get_name_from_country_code(cc)
            if cn:
                print(source_type.upper(), "country name:", cn)
def lookup_ipv4_address(self, lookup_ipaddr):
    """ Print the country code/name for an IPv4 address from MaxMind,
        RIR and LIR sources, and warn on MaxMind/RIR disagreement.
        Output-only; returns None. """
    # NOTE(review): the guard lines between the visible prints were
    # missing from the fragmentary source and have been restored.
    print("Reverse lookup for: " + str(lookup_ipaddr))
    maxmind_cc = self.database_cache.fetch_country_code('ipv4', 'maxmind',
                                                        int(lookup_ipaddr))
    if maxmind_cc:
        print('MaxMind country code:', maxmind_cc)
        maxmind_cn = self.get_name_from_country_code(maxmind_cc)
        if maxmind_cn:
            print('MaxMind country name:', maxmind_cn)
    rir_cc = self.database_cache.fetch_country_code('ipv4', 'rir',
                                                    int(lookup_ipaddr))
    if rir_cc:
        print('RIR country code:', rir_cc)
        rir_cn = self.get_name_from_country_code(rir_cc)
        if rir_cn:
            print('RIR country name:', rir_cn)
    else:
        print('Not found in RIR db')
    lir_cc = self.database_cache.fetch_country_code('ipv4', 'lir',
                                                    int(lookup_ipaddr))
    if lir_cc:
        print('LIR country code:', lir_cc)
        lir_cn = self.get_name_from_country_code(lir_cc)
        if lir_cn:
            print('LIR country name:', lir_cn)
    if maxmind_cc and maxmind_cc != rir_cc:
        print("It appears that the RIR data conflicts with MaxMind's "
              "data. MaxMind's data is likely closer to being "
              "correct due to sub-delegation issues with LIR databases.")
def lookup_ip_address(self, lookup_str):
    """ Return the country code and name for a given ip address. """
    # NOTE(review): `try:` and the final `else:` restored from
    # fragmentary source.
    try:
        lookup_ipaddr = ipaddr.ip_address(lookup_str)
        if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
            self.lookup_ipv4_address(lookup_ipaddr)
        elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
            self.lookup_ipv6_address(lookup_ipaddr)
        else:
            print(("Did not recognize '%s' as either IPv4 or IPv6 "
                   "address." % lookup_str))
    except ValueError as e:
        print("'%s' is not a valid IP address." % lookup_str)
def asn_lookup(self, asn):
    """ Print the RIR country code/name registered for an ASN.
        Output-only; returns None. """
    asn_cc = self.database_cache.fetch_country_code('asn', 'rir', asn)
    # NOTE(review): if/else guards restored from fragmentary source.
    if asn_cc:
        print("AS country code: %s" % asn_cc)
        asn_cn = self.get_name_from_country_code(asn_cc)
        if asn_cn:
            print("AS country name: %s" % asn_cn)
    else:
        print("AS%s not found!" % asn)
def fetch_rir_blocks_by_country(self, request, country):
    """ Return all cached blocks for a country: CIDR strings for
        "ipv4"/"ipv6" requests, bare numbers (as strings) otherwise
        (e.g. ASNs). """
    # NOTE(review): accumulator, `else:` branch and return restored
    # from fragmentary source.
    result = []
    for (start_num, end_num) in \
            self.database_cache.fetch_assignments(request, country):
        if request == "ipv4" or request == "ipv6":
            start_ipaddr = ipaddr.ip_address(start_num)
            end_ipaddr = ipaddr.ip_address(end_num)
            result += [str(x) for x in
                       ipaddr.summarize_address_range(
                           start_ipaddr, end_ipaddr)]
        else:
            result.append(str(start_num))
    return result
# Compare one country's assignments against overlapping assignments in
# other source types and print a marker-annotated report.
# NOTE(review): this region of the file is fragmentary (the lines that
# assign the `marker` variable and the legend's print() opening are
# missing); comments annotate only the visible statements.
971 def lookup_countries_in_different_source(self
, first_country_code
):
972 """ Look up all assignments matching the given country code, then
973 look up to which country code(s) the same number ranges are
974 assigned in other source types. Print out the result showing
975 similarities and differences. """
# Legend for the one-character markers printed per overlapping range.
977 " '<' = found assignment range with country code '%s'\n"
978 " '>' = overlapping assignment range with same country code\n"
979 " '*' = overlapping assignment range, first conflict\n"
980 " '#' = overlapping assignment range, second conflict and "
981 "beyond\n ' ' = neighboring assignment range") % (
982 first_country_code
, ))
# 8-tuples: see fetch_country_blocks_in_other_sources for the layout.
983 results
= self
.database_cache
.fetch_country_blocks_in_other_sources(
985 prev_first_source_type
= ''
986 prev_first_start_num
= -1
987 cur_second_country_codes
= []
988 for (first_source_type
, first_start_num
, first_end_num
,
989 second_source_type
, second_start_num
, second_end_num
,
990 second_country_code
, num_type
) in results
:
# New section header whenever the first-source type changes.
991 if first_source_type
!= prev_first_source_type
:
992 print("\nAssignments in '%s':" % (first_source_type
, ))
993 prev_first_source_type
= first_source_type
# Reset conflict tracking when a new first-source range starts.
994 if first_start_num
!= prev_first_start_num
:
995 cur_second_country_codes
= []
997 prev_first_start_num
= first_start_num
# Only ranges that actually overlap the first range are classified.
999 if second_end_num
>= first_start_num
and \
1000 second_start_num
<= first_end_num
:
1001 if first_country_code
!= second_country_code
and \
1002 second_country_code
not in cur_second_country_codes
:
1003 cur_second_country_codes
.append(second_country_code
)
1004 if first_source_type
== second_source_type
:
1006 elif len(cur_second_country_codes
) == 0:
1008 elif len(cur_second_country_codes
) == 1:
# Render the overlapping range: single address, address range, or
# AS number / AS range depending on num_type.
1012 if num_type
.startswith("ip") and \
1013 second_start_num
== second_end_num
:
1014 second_range
= "%s" % (ipaddr
.ip_address(second_start_num
), )
1015 elif num_type
.startswith("ip") and \
1016 second_start_num
< second_end_num
:
1017 second_range
= "%s-%s" % (ipaddr
.ip_address(second_start_num
),
1018 ipaddr
.ip_address(second_end_num
))
1019 elif second_start_num
< second_end_num
:
1020 second_range
= "AS%d-%d" % (second_start_num
, second_end_num
)
1022 second_range
= "AS%d" % (second_start_num
, )
# `marker` is set in the missing branches above (one of < > * # or space).
1023 print("%1s %s %s %s" % (marker
, second_country_code
, second_range
,
1024 second_source_type
, ))
1026 def _get_network_string_from_range(self
, end
, start
, bits
=32):
1027 start
, end
= int(start
, 16), int(end
, 16)
1028 netbits
= bits
- int(log(end
- start
, 2))
1029 return ipaddr
.IPNetwork("%s/%d" % (ipaddr
.IPAddress(start
), netbits
))
def lookup_org_by_ip(self, lookup_str):
    """ Return the ASN and AS Description by IP """
    # NOTE(review): the try/except skeleton and the num_type/len_bits
    # assignments were missing from the garbled source and have been
    # restored -- verify against upstream.
    try:
        lookup_ipaddr = ipaddr.IPAddress(lookup_str)
        if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
            num_type = 'ipv4'
            len_bits = 32
        elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
            num_type = 'ipv6'
            len_bits = 128
        else:
            raise ValueError
        rs = self.database_cache.fetch_org_by_ip_address(
            lookup_ipaddr, num_type)
        for r in rs:
            # r = (ASN, AS description, start-hex, end-hex).
            network = self._get_network_string_from_range(
                r[3], r[2], bits=len_bits)
            print("%s in %s announced by AS%s - %s" %
                  (lookup_str, network, r[0], r[1]))
    except ValueError:
        print("'%s' is not a valid IP address." % lookup_str)
    except TypeError:
        # NOTE(review): presumably reached when the cache query yields
        # a non-iterable result -- confirm the intended trigger.
        print("Did not find any matching announcements containing %s." %
              lookup_str)
def lookup_org_by_range(self, start_range, end_range):
    """Print the ASN and AS description of every network announced
    within the given start/end address range.

    NOTE(review): the try/except skeleton and num_type/len_bits
    assignments were missing from the garbled source and restored.
    """
    output_str = "%s announced by AS%s - %s"
    try:
        a = ipaddr.IPAddress(start_range)
        b = ipaddr.IPAddress(end_range)
        if isinstance(a, ipaddr.IPv4Address) and isinstance(
                b, ipaddr.IPv4Address):
            num_type = 'ipv4'
            len_bits = 32
        elif isinstance(a, ipaddr.IPv6Address) and (
                isinstance(b, ipaddr.IPv6Address)):
            num_type = 'ipv6'
            len_bits = 128
        else:
            # Mixed IPv4/IPv6 bounds do not form a valid range.
            raise ValueError
        rs = self.database_cache.fetch_org_by_ip_range(
            min(a, b), max(a, b), num_type)
        for r in rs:
            network = self._get_network_string_from_range(
                r[3], r[2], bits=len_bits)
            print(output_str % (network, r[0], r[1]))
    except ValueError:
        print("%s %s is not a valid IP range." % (start_range, end_range))
    except TypeError:
        print("Did not find any matching announcements in range %s %s." %
              (start_range, end_range))
def split_callback(option, opt, value, parser):
    """optparse callback for 'VALUE[:type]' arguments: store the part
    before the first ':' under the option's dest, and, when a non-empty
    part follows the first ':', store it as 'type_filter'."""
    pieces = value.split(':')
    setattr(parser.values, option.dest, pieces[0])
    if len(pieces) > 1:
        if pieces[1] != '':
            setattr(parser.values, 'type_filter', pieces[1])
def normalize_country_code(country_code):
    """ Normalize country codes a bit by making capitalization consistent and
        removing trailing comments (and other words). """
    if not country_code:
        # Pass empty/None values through unchanged; the regex below
        # would fail on them. (Early return restored from garbled source.)
        return country_code
    country_code = re.match(r'^(\w+)', country_code).group(1)
    return country_code.upper()
def main():
    """ Where the magic starts. """
    # NOTE(review): this function was reconstructed from a garbled source.
    # Short option letters and a few long flag strings were on missing
    # lines; they are best-effort reconstructions -- verify the exact
    # command-line interface against upstream before relying on it.
    usage = ("Usage: %prog [options]\n\n"
             "Example: %prog -v -t mm")
    parser = optparse.OptionParser(usage)
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", help="be verbose", default=False)
    parser.add_option("-c", "--cache-dir", action="store", dest="dir",
                      help="set cache directory [default: %default]",
                      default=str(os.path.expanduser('~')) + "/.blockfinder/")
    parser.add_option("--user-agent", action="store", dest="ua",
                      help=('provide a User-Agent which will be used when '
                            'fetching delegation files [default: "%default"]'),
                      default=("Mozilla/5.0 (Windows NT 6.1; rv:17.0) "
                               "Gecko/20100101 Firefox/17.0"))
    parser.add_option("-x", "--hack-the-internet", action="store_true",
                      dest="hack_the_internet", help=optparse.SUPPRESS_HELP)
    # Cache modes: initialize or refresh the local database cache.
    group = optparse.OptionGroup(
        parser,
        "Cache modes",
        "Pick at most one of these modes to initialize or update "
        "the local cache. May not be combined with lookup modes.")
    group.add_option(
        "-m",
        "--init-maxmind",
        action="store_true",
        dest="init_maxmind",
        help="initialize or update MaxMind GeoIP database")
    group.add_option(
        "-g",
        "--reload-maxmind",
        action="store_true",
        dest="reload_maxmind",
        help=("update cache from existing MaxMind GeoIP database"))
    group.add_option(
        "--import-maxmind",
        action="store",
        dest="import_maxmind",
        metavar="FILE",
        help=("import the specified MaxMind GeoIP database file into "
              "the database cache using its file name as source "
              "name"))
    group.add_option("-i", "--init-rir",
                     action="store_true", dest="init_del",
                     help="initialize or update delegation information")
    group.add_option(
        "-d",
        "--reload-rir",
        action="store_true",
        dest="reload_del",
        help="use existing delegation files to update the database")
    group.add_option(
        "-l",
        "--init-lir",
        action="store_true",
        dest="init_lir",
        help=("initialize or update lir information; can take up to "
              "5 minutes"))
    group.add_option(
        "-z",
        "--reload-lir",
        action="store_true",
        dest="reload_lir",
        help=("use existing lir files to update the database; can "
              "take up to 5 minutes"))
    group.add_option(
        "-n",
        "--download-cc",
        action="store_true",
        dest="download_cc",
        help="download country codes file")
    group.add_option(
        "-e",
        "--erase-cache",
        action="store_true",
        dest="erase_cache",
        help="erase the local database cache")
    group.add_option(
        "--init-asn-descriptions",
        action="store_true",
        dest="init_asn_descriptions",
        help=("initialize or update asn description information"))
    group.add_option(
        "--reload-asn-descriptions",
        action="store_true",
        dest="reload_asn_descriptions",
        help=("Use existing asn descriptions to update database"))
    group.add_option(
        "--init-asn-assignments",
        action="store_true",
        dest="init_asn_assignments",
        help=("initialize or update asn assignment information"))
    group.add_option(
        "--reload-asn-assignments",
        action="store_true",
        dest="reload_asn_assignments",
        help=("Use existing asn assignments to update database"))
    parser.add_option_group(group)
    # Lookup modes: query the local cache.
    group = optparse.OptionGroup(
        parser, "Lookup modes",
        "Pick at most one of these modes to look up data in the "
        "local cache. May not be combined with cache modes.")
    group.add_option(
        "-4",
        "--ipv4",
        action="store",
        dest="ipv4",
        help=("look up country code and name for the specified IPv4 "
              "address"))
    group.add_option(
        "-6",
        "--ipv6",
        action="store",
        dest="ipv6",
        help=("look up country code and name for the specified IPv6 "
              "address"))
    group.add_option(
        "-a",
        "--asn",
        action="store",
        dest="asn",
        help="look up country code and name for the specified ASN")
    group.add_option(
        "-t",
        "--code",
        action="callback",
        callback=split_callback,
        metavar="CC[:type]",
        type="str",
        dest="cc",
        help=("look up all allocations (or only those for number "
              "type 'ipv4', 'ipv6', or 'asn' if provided) in the "
              "delegation cache for the specified two-letter country "
              "code"))
    group.add_option(
        "--name",
        action="callback",
        callback=split_callback,
        metavar="CN[:type]",
        type="str",
        dest="cn",
        help=("look up all allocations (or only those for number "
              "type 'ipv4', 'ipv6', or 'asn' if provided) in the "
              "delegation cache for the specified full country "
              "name"))
    group.add_option(
        "--compare",
        action="store",
        dest="compare",
        metavar="CC",
        help=("compare assignments to the specified country code "
              "with overlapping assignments in other data "
              "sources; can take some time and produce some "
              "long output"))
    group.add_option(
        "--what-country",
        action="store",
        dest="what_cc",
        help=("look up country name for specified country code"))
    group.add_option(
        "--lookup-org-by-ip",
        "--lookup-org-by-ip",
        action="store",
        dest="lookup_org_by_ip",
        help=("look up ASN and AS Description for an IP address"))
    group.add_option(
        "--lookup-org-by-range",
        "--lookup-org-by-range",
        action="store_true",
        dest="lookup_org_by_range",
        help=("look up announced networks in a range of addresses; "
              "requires --range-start and --range-end to be set"))
    group.add_option(
        "--range-start",
        "--range-start",
        action="store",
        dest="range_start",
        help=("Specify the start of a range of addresses"))
    group.add_option(
        "--range-end", "--range-end",
        action="store",
        dest="range_end",
        help=("Specify the end of a range of addresses"))
    parser.add_option_group(group)
    # Export modes: dump the cache in GeoIP CSV formats.
    group = optparse.OptionGroup(parser, "Export modes")
    group.add_option(
        "--export-geoip",
        action="store_true",
        dest="export",
        help=("export the lookup database to GeoIPCountryWhois.csv and "
              "v6.csv files in the format used to build the debian "
              "package geoip-database"))
    group.add_option(
        "--geoip-v4-file",
        action="store",
        dest="geoip_v4_filename",
        help=("The filename to write the IPv4 GeoIP dataset to"))
    group.add_option(
        "--geoip-v6-file",
        action="store",
        dest="geoip_v6_filename",
        help=("The filename to write the IPv6 GeoIP dataset to"))
    group.add_option(
        "--geoip-asn-file",
        action="store",
        dest="geoip_asn_filename",
        help=("The filename to write the IPv4 GeoIP ASNum dataset to"))
    parser.add_option_group(group)
    # This group was declared but never populated in the original.
    group = optparse.OptionGroup(parser, "Network modes")
    (options, args) = parser.parse_args()
    if options.hack_the_internet:
        print("all your bases are belong to us!")
        sys.exit(0)
    options_dict = vars(options)
    # Exactly one cache/lookup/export mode must be selected.
    modes = 0
    for mode in ["init_maxmind", "reload_maxmind", "import_maxmind",
                 "init_del", "init_lir", "reload_del", "reload_lir",
                 "download_cc", "erase_cache", "ipv4", "ipv6", "asn",
                 "cc", "cn", "compare", "what_cc", "init_asn_descriptions",
                 "reload_asn_descriptions", "init_asn_assignments",
                 "reload_asn_assignments", "lookup_org_by_ip",
                 "lookup_org_by_range", "export"]:
        if mode in options_dict and options_dict.get(mode):
            modes += 1
    if modes > 1:
        parser.error("only 1 cache or lookup mode allowed")
    elif modes == 0:
        parser.error("must provide 1 cache or lookup mode")
    database_cache = DatabaseCache(options.dir, options.verbose)
    if options.erase_cache:
        database_cache.erase_database()
        sys.exit(0)
    if not database_cache.connect_to_database():
        print("Could not connect to database.")
        print("You may need to erase it using -e and then reload it "
              "using -d/-z. Exiting.")
        sys.exit(1)
    database_cache.set_db_version()
    downloader_parser = DownloaderParser(options.dir, database_cache,
                                         options.ua)
    lookup = Lookup(options.dir, database_cache)
    if options.ipv4 or options.ipv6 or options.asn or options.cc \
            or options.cn or options.compare:
        if downloader_parser.check_rir_file_mtimes():
            print("Your cached RIR files are older than 24 hours; you "
                  "probably want to update them.")
    if options.asn:
        lookup.asn_lookup(options.asn)
    elif options.lookup_org_by_ip:
        lookup.lookup_org_by_ip(options.lookup_org_by_ip)
    elif options.lookup_org_by_range:
        if not (options.range_start and options.range_end):
            print("You must specify the start and end addresses; "
                  "see --range-start and --range-end")
        else:
            lookup.lookup_org_by_range(options.range_start,
                                       options.range_end)
    elif options.ipv4:
        lookup.lookup_ip_address(options.ipv4)
    elif options.ipv6:
        lookup.lookup_ip_address(options.ipv6)
    elif options.cc or options.cn or options.what_cc:
        country = None
        if options.cc:
            country = options.cc.upper()
        elif not lookup.knows_country_names():
            print("Need to download country codes first before looking "
                  "up countries by name.")
        elif options.what_cc:
            country = options.what_cc.upper()
            country_name = lookup.get_name_from_country_code(country)
            if country_name:
                print(("Hmm...%s? That would be %s."
                       % (options.what_cc, country_name)))
                sys.exit(0)
            else:
                print(("Hmm, %s? We're not sure either. Are you sure that's "
                       "a country code?" % options.what_cc))
                sys.exit(1)
        else:
            country = lookup.get_country_code_from_name(options.cn)
            if not country:
                print("It appears your search did not match a country.")
        if country:
            types = ["ipv4", "ipv6", "asn"]
            if hasattr(options, 'type_filter') and \
                    options.type_filter.lower() in types:
                types = [options.type_filter.lower()]
            for request in types:
                print("\n".join(lookup.fetch_rir_blocks_by_country(
                    request, country)))
    elif options.compare:
        print("Comparing assignments with overlapping assignments in other "
              "data sources...")
        lookup.lookup_countries_in_different_source(options.compare)
    elif options.init_maxmind or options.reload_maxmind:
        if options.init_maxmind:
            print("Downloading Maxmind GeoIP files...")
            downloader_parser.download_maxmind_files()
        print("Importing Maxmind GeoIP files...")
        downloader_parser.parse_maxmind_files()
    elif options.import_maxmind:
        print("Importing Maxmind GeoIP files...")
        downloader_parser.import_maxmind_file(options.import_maxmind)
    elif options.init_del or options.reload_del:
        if options.init_del:
            print("Downloading RIR files...")
            downloader_parser.download_rir_files()
            print("Verifying RIR files...")
            downloader_parser.verify_rir_files()
        print("Importing RIR files...")
        downloader_parser.parse_rir_files()
    elif options.init_lir or options.reload_lir:
        if options.init_lir:
            print("Downloading LIR delegation files...")
            downloader_parser.download_lir_files()
        print("Importing LIR files...")
        downloader_parser.parse_lir_files()
    elif options.download_cc:
        print("Downloading country code file...")
        downloader_parser.download_country_code_file()
    elif options.init_asn_descriptions or options.reload_asn_descriptions:
        if options.init_asn_descriptions:
            print("Downloading ASN Descriptions...")
            downloader_parser.download_asn_description_file()
        print("Importing ASN Descriptions...")
        downloader_parser.parse_asn_description_file()
    elif options.init_asn_assignments or options.reload_asn_assignments:
        if options.init_asn_assignments:
            print("Downloading ASN Assignments...")
            downloader_parser.download_asn_assignment_files()
        print("Importing ASN Assignments...")
        downloader_parser.parse_asn_assignment_files()
    elif options.export:
        v4_file = options.geoip_v4_filename or "GeoIPCountryWhois.csv"
        v6_file = options.geoip_v6_filename or "v6.csv"
        asn_file = options.geoip_asn_filename or "GeoIPASNum.csv"
        print("Exporting GeoIP IPv4 to %s" % v4_file)
        database_cache.export_geoip(lookup, v4_file, 'ipv4')
        print("Exporting GeoIP IPv6 to %s" % v6_file)
        database_cache.export_geoip(lookup, v6_file, 'ipv6')
        print("Exporting GeoIP IPv4 ASNum to %s" % asn_file)
        database_cache.export_asn(asn_file, 'ipv4')
        # IPv6 ASNum export not supported yet:
        # print("Exporting GeoIP IPv6 ASNum to %s" % asn_file)
        # database_cache.export_geoip(asn_file, 'ipv6')
    database_cache.commit_and_close_database()
# Script entry point; the main() call line was missing from the garbled
# source and has been restored.
if __name__ == "__main__":
    main()