#!/usr/bin/python
# -*- coding: utf-8 -*-
# For the people of Smubworld!
import os
import time
import optparse
import sys
import sqlite3
import hashlib
import gzip
import zipfile
import re
import bz2
from math import log
if sys.version_info[0] >= 3:
    from configparser import ConfigParser
    import ipaddress as ipaddr
    from urllib.request import (urlopen, Request)
    from urllib.error import URLError
    long = int
    unicode = str
    # Shims so the py2 "ipaddr" spellings used below keep working on py3;
    # note that py3's ip_network is strict by default, hence the wrapper.
    ipaddr.IPAddress = ipaddr.ip_address
    ipaddr.IPNetwork = lambda a: ipaddr.ip_network(a, strict=False)
else:
    from ConfigParser import SafeConfigParser as ConfigParser
    from urllib2 import (urlopen, Request, URLError)
    try:
        from embedded_ipaddr import ipaddr
        ipaddr.ip_address = ipaddr.IPAddress
    except ImportError:
        import ipaddr
        ipaddr.ip_address = ipaddr.IPAddress

is_win32 = (sys.platform == "win32")

__program__ = 'blockfinder'
__url__ = 'https://github.com/ioerror/blockfinder/'
__author__ = 'Jacob Appelbaum <jacob@appelbaum.net>, David <db@d1b.org>'
__copyright__ = 'Copyright (c) 2010'
__license__ = 'See LICENSE for licensing information'
__version__ = '3.14159'

try:
    from future import antigravity
except ImportError:
    antigravity = None


class DatabaseCache(object):

    def __init__(self, cache_dir, verbose=False):
        self.cache_dir = cache_dir
        self.verbose = verbose
        self.conn = None
        self.cursor = None
        self.db_version = "0.0.4"
        self.db_path = os.path.join(self.cache_dir, "sqlitedb")

    def erase_database(self):
        """ Erase the database file. """
        if os.path.exists(self.db_path):
            os.remove(self.db_path)

    def connect_to_database(self):
        """ Connect to the database cache, possibly after creating it if
            it doesn't exist yet, or after making sure an existing
            database cache has the correct version. Return True if a
            connection could be established, False otherwise. """
        if not os.path.exists(self.cache_dir):
            if self.verbose:
                print("Initializing the cache directory...")
            os.mkdir(self.cache_dir)
        if os.path.exists(self.db_path):
            cache_version = self.get_db_version()
            if not cache_version:
                cache_version = "0.0.1"
            if cache_version != self.db_version:
                print(("The existing database cache uses version %s, "
                       "not the expected %s." % (cache_version,
                                                 self.db_version)))
                return False
        self.conn = sqlite3.connect(self.db_path)
        self.cursor = self.conn.cursor()
        self.create_assignments_table()
        self.create_asn_description_table()
        self.create_asn_assignments_table()
        return True

    def __get_default_config_file_obj(self):
        open_flags = 'r+'
        file_path = os.path.join(self.cache_dir, 'db.cfg')
        if not os.path.exists(file_path):
            open_flags = 'w+'
        return open(file_path, open_flags)

    def _get_db_config(self, file_obj=None):
        """ Return the database configuration object from the provided
            file_obj if provided, otherwise from the default database
            configuration file. """
        if file_obj is None:
            file_obj = self.__get_default_config_file_obj()
        config = ConfigParser()
        if sys.version_info[0] >= 3:
            config.read_file(file_obj)
        else:
            config.readfp(file_obj)
        file_obj.close()
        return config

    def set_db_version(self, file_obj=None):
        """ Set the database version string in the config file. """
        if file_obj is None:
            file_obj = self.__get_default_config_file_obj()
        config = self._get_db_config()
        if not config.has_section('db'):
            config.add_section('db')
        config.set('db', 'version', self.db_version)
        config.write(file_obj)
        file_obj.close()

    def get_db_version(self):
        """ Read and return the database version string from the config
            file, or None if the config file has no 'db' section. """
        config = self._get_db_config()
        if not config.has_section('db'):
            return None
        return config.get('db', 'version')

    def commit_and_close_database(self):
        self.conn.commit()
        self.conn.close()

    def create_assignments_table(self):
        """ Create the assignments table that stores all assignments from
            IPv4/IPv6/ASN to country code. Blocks are stored as the first
            hex of the assignment and the first hex after it. Numbers are
            stored as hex strings, because SQLite's INTEGER type only holds
            up to 63 unsigned bits, which is not enough to store a /64 IPv6
            block. Hex strings have leading zeros, with IPv6 addresses
            being 33 hex characters long and IPv4 addresses and ASN being
            9 hex characters long. The first number after an assignment
            range is stored instead of the last number in the range to
            facilitate comparisons with neighboring ranges. """
        sql = ('CREATE TABLE IF NOT EXISTS assignments(start_hex TEXT, '
               'next_start_hex TEXT, num_type TEXT, country_code TEXT, '
               'source_type TEXT, source_name TEXT)')
        self.cursor.execute(sql)
        self.conn.commit()
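
    # Illustration (assumed values, not part of the original flow): the IPv4
    # block 10.0.0.0/8 spans start_num 167772160 to end_num 184549375, so it
    # is stored as start_hex '00a000000' and next_start_hex '00b000000'
    # (nine zero-padded hex digits; IPv6 uses 33). Storing the first number
    # *after* the range keeps neighboring ranges directly comparable.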

    def create_asn_description_table(self):
        """ Create the asn_descriptions table that stores all the
            descriptions associated with ASNs. """
        sql = ('CREATE TABLE IF NOT EXISTS asn_descriptions(as_num INT, '
               'source_name TEXT, description TEXT)')
        self.cursor.execute(sql)
        sql = ('CREATE INDEX IF NOT EXISTS DescriptionsByASN ON '
               'asn_descriptions ( as_num )')
        self.cursor.execute(sql)
        self.conn.commit()

    def create_asn_assignments_table(self):
        """ Create the assignments table that stores the assignments from
            IPv4 blocks to ASNs. """
        # XXX: IPv6 not yet supported. (Not available from routeviews?)
        sql = ('CREATE TABLE IF NOT EXISTS asn_assignments(start_hex TEXT, '
               'next_start_hex TEXT, num_type TEXT, as_num INT, '
               'source_type TEXT, source_name TEXT, PRIMARY KEY(start_hex, '
               'next_start_hex))')
        self.cursor.execute(sql)
        sql = ('CREATE INDEX IF NOT EXISTS ASNEntriesByStartHex on '
               'asn_assignments ( start_hex )')
        self.cursor.execute(sql)
        self.conn.commit()

    def delete_assignments(self, source_type):
        """ Delete all assignments from the database cache matching a
            given source type ("rir", "lir", etc.). """
        sql = 'DELETE FROM assignments WHERE source_type = ?'
        self.cursor.execute(sql, (source_type, ))
        self.conn.commit()

    def delete_asn_descriptions(self):
        """ Delete all asn descriptions from the database cache. """
        sql = 'DELETE FROM asn_descriptions'
        self.cursor.execute(sql)
        self.conn.commit()

    def delete_asn_assignments(self):
        """ Delete all the bgp netblock to as entries. """
        sql = 'DELETE FROM asn_assignments'
        self.cursor.execute(sql)
        self.conn.commit()

    def insert_assignment(self, start_num, end_num, num_type,
                          country_code, source_type, source_name):
        """ Insert an assignment into the database cache, without
            committing after the insertion. """
        sql = ('INSERT INTO assignments (start_hex, next_start_hex, '
               'num_type, country_code, source_type, source_name) '
               'VALUES (?, ?, ?, ?, ?, ?)')
        if num_type == 'ipv6':
            start_hex = '%033x' % start_num
            next_start_hex = '%033x' % (end_num + 1)
        else:
            start_hex = '%09x' % start_num
            next_start_hex = '%09x' % (end_num + 1)
        country_code = normalize_country_code(country_code)
        self.cursor.execute(sql, (start_hex, next_start_hex, num_type,
                                  country_code, source_type, source_name))

    def insert_asn_description(self, asn, source_name, description):
        sql = ('INSERT INTO asn_descriptions '
               '(as_num, source_name, description) '
               'VALUES (?, ?, ?)')
        self.cursor.execute(sql, (asn, source_name, unicode(description)))

    def insert_asn_assignment(self, start_num, end_num, num_type, asn,
                              source_type, source_name):
        # XXX: This is sqlite specific syntax
        sql = ('INSERT OR IGNORE INTO asn_assignments (start_hex, '
               'next_start_hex, num_type, as_num, source_type, source_name) '
               'VALUES (?, ?, ?, ?, ?, ?)')
        if num_type == 'ipv6':
            start_hex = '%033x' % start_num
            next_start_hex = '%033x' % (end_num + 1)
        else:
            start_hex = '%09x' % start_num
            next_start_hex = '%09x' % (end_num + 1)
        self.cursor.execute(sql, (start_hex, next_start_hex, num_type, asn,
                                  source_type, source_name))

    def commit_changes(self):
        """ Commit changes, e.g., after inserting assignments into the
            database cache. """
        self.conn.commit()

    def fetch_assignments(self, num_type, country_code):
        """ Fetch all assignments from the database cache matching the
            given number type ("asn", "ipv4", or "ipv6") and country code.
            The result is a sorted list of tuples containing (start_num,
            end_num). """
        sql = ('SELECT start_hex, next_start_hex FROM assignments '
               'WHERE num_type = ? AND country_code = ? '
               'ORDER BY start_hex')
        self.cursor.execute(sql, (num_type, country_code))
        result = []
        for row in self.cursor:
            result.append((long(row[0], 16), long(row[1], 16) - 1))
        return result

    def fetch_country_code(self, num_type, source_type, lookup_num):
        """ Fetch the country code from the database cache that is
            assigned to the given number (e.g., IPv4 address in decimal
            notation), number type (e.g., "ipv4"), and source type (e.g.,
            "rir"). """
        sql = ('SELECT country_code FROM assignments WHERE num_type = ? '
               'AND source_type = ? AND start_hex <= ? '
               'AND next_start_hex > ?')
        if num_type == 'ipv6':
            lookup_hex = '%033x' % long(lookup_num)
        else:
            lookup_hex = '%09x' % long(lookup_num)
        self.cursor.execute(sql, (num_type, source_type, lookup_hex,
                                  lookup_hex))
        row = self.cursor.fetchone()
        if row:
            return str(row[0])
        return None
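
    # Illustration (assumed lookup, not part of the original flow): for
    # 10.1.2.3 the caller passes int(IPv4Address('10.1.2.3')) == 167838211,
    # which becomes lookup_hex '00a010203'. Because every start_hex and
    # next_start_hex is zero-padded to the same width, the TEXT comparisons
    # start_hex <= ? AND next_start_hex > ? behave like numeric range checks.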

    def fetch_country_blocks_in_other_sources(self, first_country_code):
        """ Fetch all assignments matching the given country code, then look
            up to which country code(s) the same number ranges are assigned
            in other source types. Return 8-tuples containing (1) first
            source type, (2) first and (3) last number of the assignment in
            the first source type, (4) second source type, (5) first and (6)
            last number of the assignment in the second source type, (7)
            country code in the second source type, and (8) number type. """
        sql = ('SELECT first.source_type, first.start_hex, '
               'first.next_start_hex, second.source_type, '
               'second.start_hex, second.next_start_hex, '
               'second.country_code, first.num_type '
               'FROM assignments AS first '
               'JOIN assignments AS second '
               'WHERE first.country_code = ? '
               'AND first.start_hex <= second.next_start_hex '
               'AND first.next_start_hex >= second.start_hex '
               'AND first.num_type = second.num_type '
               'ORDER BY first.source_type, first.start_hex, '
               'second.source_type, second.start_hex')
        self.cursor.execute(sql, (first_country_code, ))
        result = []
        for row in self.cursor:
            result.append((str(row[0]), long(row[1], 16),
                           long(row[2], 16) - 1, str(row[3]),
                           long(row[4], 16), long(row[5], 16) - 1,
                           str(row[6]), str(row[7])))
        return result

    def fetch_org_by_ip_address(self, lookup_str, num_type):
        if num_type == 'ipv4':
            lookup_hex = '%09x' % long(int(lookup_str))
        else:
            lookup_hex = '%033x' % long(int(lookup_str))
        sql = ('SELECT asn_descriptions.as_num, asn_descriptions.description, '
               'asn_assignments.start_hex, asn_assignments.next_start_hex '
               'FROM asn_descriptions JOIN asn_assignments ON '
               'asn_assignments.as_num = asn_descriptions.as_num '
               'WHERE num_type = ? AND start_hex <= ? AND next_start_hex > ?')
        self.cursor.execute(sql, (num_type, lookup_hex, lookup_hex))
        row = self.cursor.fetchall()
        if row:
            return row
        return None

    def fetch_org_by_ip_range(self, lookup_start, lookup_end, num_type):
        if num_type == 'ipv4':
            lookup_start_hex = '%09x' % long(int(lookup_start))
            lookup_end_hex = '%09x' % long(int(lookup_end))
        else:
            lookup_start_hex = '%033x' % long(int(lookup_start))
            lookup_end_hex = '%033x' % long(int(lookup_end))
        sql = ('SELECT asn_descriptions.as_num, asn_descriptions.description, '
               'asn_assignments.start_hex, asn_assignments.next_start_hex '
               'FROM asn_descriptions JOIN asn_assignments ON '
               'asn_assignments.as_num = asn_descriptions.as_num '
               'WHERE num_type = ? AND start_hex >= ? AND next_start_hex <= ?')
        self.cursor.execute(sql, (num_type, lookup_start_hex, lookup_end_hex))
        row = self.cursor.fetchall()
        if row:
            return row
        return None

    def _concatenate_and_write(
            self, records, write_function=None, record_filter=None, bits=32):
        netblocks = []
        for row in records:
            try:
                start_hex, next_start_hex, record = \
                    long(row[0], 16), long(row[1], 16), str(row[2])
                nb = bits - int(log(next_start_hex - start_hex, 2))
                net = ipaddr.IPNetwork("%s/%d" %
                                       (ipaddr.IPAddress(start_hex), nb))
                if callable(record_filter):
                    record = record_filter(record)
            except ValueError:
                continue

            # Concatenate adjacent blocks of the same country
            if netblocks and netblocks[-1][1] == record:
                pn = netblocks[-1][0]
                nb = bits - int(log(int(net.network) +
                                    int(net.numhosts) - int(pn.network), 2))
                netblocks[-1] = (ipaddr.IPNetwork("%s/%d" %
                                                  (pn.network, nb)), record)

            # if the adjacent blocks aren't the same country,
            # write the last block out to csv and add the new block
            # to the list for possible concatenation
            elif netblocks:
                prev_n, prev_record = netblocks.pop()
                if callable(write_function):
                    write_function(prev_n, prev_record)
                netblocks.append((net, record))

            # this is the base case
            else:
                netblocks.append((net, record))

        # flush the final pending block
        if netblocks and callable(write_function):
            prev_n, prev_record = netblocks.pop()
            write_function(prev_n, prev_record)
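
    # Illustration (assumed rows, not part of the original flow): if the
    # cursor yields 1.0.0.0/24 and then 1.0.1.0/24, both with record "FR",
    # the first branch above widens the previous entry instead of appending:
    # nb = 32 - int(log(16777472 + 256 - 16777216, 2)) == 23, so the two
    # blocks collapse into a single ("1.0.0.0/23", "FR") entry.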

    def export_asn(self, filename, num_type):
        """ Export assignments to the CSV format used to build the
            geoip-database asn lookup. """
        sql = ('SELECT start_hex, next_start_hex, as_num '
               'FROM asn_assignments WHERE num_type = ? ORDER BY start_hex')
        self.cursor.execute(sql, (num_type,))
        try:
            f = open(filename, 'w')
        except IOError:
            print("Unable to open %s" % filename)
            return

        def write_csv_line(network, asn):
            f.write(""""%s","%s","%d","%d","%s"\n""" % (network.network,
                                                        network.broadcast,
                                                        int(network.network),
                                                        int(network.broadcast),
                                                        asn))
        if num_type == 'ipv6':
            ip_bits = 128
        elif num_type == 'ipv4':
            ip_bits = 32
        else:
            return

        self._concatenate_and_write(self.cursor,
                                    write_function=write_csv_line,
                                    bits=ip_bits)
        f.close()

    def export_geoip(self, lookup, filename, num_type):
        """ Export assignments to the CSV format used to build the
            geoip-database package. """
        sql = ('SELECT start_hex, next_start_hex, country_code '
               'FROM assignments WHERE num_type = ? ORDER BY start_hex')
        self.cursor.execute(sql, (num_type,))
        try:
            f = open(filename, 'w')
        except IOError:
            print("Unable to open %s" % filename)
            return

        def write_csv_line(network, country_code):
            country_name = lookup.get_name_from_country_code(country_code)
            if country_name:
                country_name = country_name.split(
                    "#")[0].strip()  # Drop comments
                f.write(""""%s","%s","%d","%d","%s","%s"\n""" % (
                    network.network,
                    network.broadcast,
                    int(network.network),
                    int(network.broadcast),
                    country_code,
                    country_name))

        if num_type == 'ipv6':
            ip_bits = 128
        elif num_type == 'ipv4':
            ip_bits = 32
        else:
            return

        self._concatenate_and_write(self.cursor,
                                    write_function=write_csv_line,
                                    record_filter=str.upper, bits=ip_bits)
        f.close()


class DownloaderParser(object):

    def __init__(self, cache_dir, database_cache, user_agent,
                 verbose=False):
        self.cache_dir = cache_dir
        self.database_cache = database_cache
        self.user_agent = user_agent
        self.verbose = verbose

    MAXMIND_URLS = """ \
        http://geolite.maxmind.com/download/geoip/database/GeoIPCountryCSV.zip
        http://geolite.maxmind.com/download/geoip/database/GeoIPv6.csv.gz
    """

    RIR_URLS = """ \
        ftp://ftp.arin.net/pub/stats/arin/delegated-arin-extended-latest
        ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest
        ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest
        ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
        ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest
    """

    LIR_URLS = """ \
        ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
        ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz
    """

    COUNTRY_CODE_URL = ("http://www.iso.org/iso/home/standards/country_codes/"
                        "country_names_and_code_elements_txt-temp.htm")

    ASN_DESCRIPTION_URL = "http://www.cidr-report.org/as2.0/autnums.html"

    ASN_ASSIGNMENT_URLS = [
        ('http://archive.routeviews.org/oix-route-views/'
         'oix-full-snapshot-latest.dat.bz2'),
    ]

    def download_maxmind_files(self):
        """ Download all MaxMind GeoIP files. """
        for maxmind_url in self.MAXMIND_URLS.split():
            self._download_to_cache_dir(maxmind_url)

    def download_rir_files(self):
        """ Download all RIR delegation files including md5 checksum. """
        for rir_url in self.RIR_URLS.split():
            rir_md5_url = rir_url + '.md5'
            self._download_to_cache_dir(rir_url)
            self._download_to_cache_dir(rir_md5_url)

    def download_lir_files(self):
        """ Download all LIR delegation urls. """
        for lir_url in self.LIR_URLS.split():
            self._download_to_cache_dir(lir_url)

    def download_country_code_file(self):
        """ Download and save the latest semicolon-separated open country
            codes file. """
        self._download_to_cache_dir(self.COUNTRY_CODE_URL)

    def download_asn_description_file(self):
        """ Download and save the latest ASN to Name report from
            cidr-report.org. """
        self._download_to_cache_dir(self.ASN_DESCRIPTION_URL)

    def download_asn_assignment_files(self):
        """ Download and save the latest routing snapshots. """
        for assignment_url in self.ASN_ASSIGNMENT_URLS:
            self._download_to_cache_dir(assignment_url)

    def _download_to_cache_dir(self, url):
        """ Fetch a resource (with progress bar) and store contents to the
            local cache directory under the file name given in the URL. """
        if not os.path.exists(self.cache_dir):
            if self.verbose:
                print("Initializing the cache directory...")
            os.mkdir(self.cache_dir)
        filename = url.split('/')[-1]
        if self.verbose:
            print(url)
        req = Request(url)
        req.add_header('User-Agent', self.user_agent)
        # TODO Allow use of a proxy.
        # req.set_proxy(host, type)
        try:
            fetcher = urlopen(req)
        except URLError as err:
            msg = "An error occurred while attempting to cache file from:"
            print(("%s\n\t%s\n\t%s" % (msg, url, str(err))))
            return
        length_header = fetcher.headers.get("Content-Length")
        expected_bytes = -1
        if length_header:
            expected_bytes = int(length_header)
            print(("Fetching %d kilobytes" %
                   round(float(expected_bytes / 1024), 2)))
        download_started = time.time()
        output_file = open(os.path.join(self.cache_dir, filename), "wb")
        received_bytes, seconds_elapsed = 0, 0
        while True:
            seconds_elapsed = time.time() - download_started
            if expected_bytes >= 0:
                self._update_progress_bar(received_bytes, expected_bytes,
                                          seconds_elapsed)
            chunk = fetcher.read(1024)
            if len(chunk) == 0:
                if expected_bytes >= 0 and received_bytes != expected_bytes:
                    print(("Expected %s bytes, only received %s" %
                           (expected_bytes, received_bytes)))
                print("")
                break
            received_bytes += len(chunk)
            output_file.write(chunk)
        output_file.close()

    def _update_progress_bar(self, received_bytes, expected_bytes,
                             seconds_elapsed):
        """ Write a progress bar to the console. """
        if is_win32:
            rows = 100  # use some WinCon function for these?
            columns = 80  # but not really important.
            EOL = "\r"
        else:
            rows, columns = list(map(int, os.popen('stty size', 'r'
                                                   ).read().split()))
            EOL = "\r"
        if seconds_elapsed == 0:
            return
        percent_done = float(received_bytes) / float(expected_bytes)
        caption = "%.2f K/s" % (received_bytes / 1024 / seconds_elapsed)
        width = columns - 4 - len(caption)
        sys.stdout.write("[%s>%s] %s%s" % (
            "=" * int(percent_done * width),
            "." * (width - int(percent_done * width)), caption, EOL))
        sys.stdout.flush()

    def check_rir_file_mtimes(self):
        """ Return True if any RIR file in our cache directory is older
            than 24 hours, False otherwise. """
        if not os.path.exists(self.cache_dir):
            return False
        for rir_url in self.RIR_URLS.split():
            rir_path = os.path.join(self.cache_dir,
                                    rir_url.split('/')[-1])
            if os.path.exists(rir_path):
                rir_stat = os.stat(rir_path)
                if (time.time() - rir_stat.st_mtime) > 86400:
                    return True
        return False

    def verify_rir_files(self):
        """ Compute md5 checksums of all RIR files, compare them to the
            provided .md5 files, and return True if the two checksums match,
            or False otherwise. """
        for rir_url in self.RIR_URLS.split():
            rir_path = os.path.join(self.cache_dir,
                                    rir_url.split('/')[-1])
            rir_md5_path = os.path.join(self.cache_dir,
                                        rir_url.split('/')[-1] + '.md5')
            if not os.path.exists(rir_md5_path) or \
                    not os.path.exists(rir_path):
                continue
            rir_md5_file = open(rir_md5_path, 'r')
            expected_checksum = rir_md5_file.read()
            rir_md5_file.close()
            if "=" in expected_checksum:
                expected_checksum = expected_checksum.split("=")[-1].strip()
            elif expected_checksum == "":
                print("No checksum... skipping verification...")
                continue
            else:
                regex = re.compile("[a-f0-9]{32}")
                regres = regex.findall(expected_checksum)
                if len(regres) > 1:
                    print("Error: multiple checksums found")
                elif len(regres) < 1:
                    print("Error: no checksum found")
                else:
                    expected_checksum = regres[0]
            computed_checksum = ""
            rir_file = open(rir_path, 'rb')
            rir_data = rir_file.read()
            rir_file.close()
            computed_checksum = str(hashlib.md5(rir_data).hexdigest())
            if expected_checksum != computed_checksum:
                print(("The computed md5 checksum of %s, %s, does *not* "
                       "match the provided checksum %s!" %
                       (rir_path, computed_checksum, expected_checksum)))
                return False
        return True
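
    # Illustration (assumed file content): a RIR .md5 file typically wraps
    # the digest in text such as
    #     MD5 (delegated-ripencc-latest) = 6d2f...
    # so the split("=")[-1] branch above extracts the trailing digest, and
    # the regex scan is the fallback for bare-digest files.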

    def parse_maxmind_files(self, maxmind_urls=None):
        """ Parse locally cached MaxMind files and insert assignments to the
            local database cache, overwriting any existing MaxMind
            assignments. """
        if not maxmind_urls:
            maxmind_urls = self.MAXMIND_URLS.split()
        self.database_cache.delete_assignments('maxmind')
        for maxmind_url in maxmind_urls:
            maxmind_path = os.path.join(self.cache_dir,
                                        maxmind_url.split('/')[-1])
            if not os.path.exists(maxmind_path):
                print("Unable to find %s." % maxmind_path)
                continue
            if maxmind_path.endswith('.zip'):
                maxmind_zip_path = zipfile.ZipFile(maxmind_path)
                for contained_filename in maxmind_zip_path.namelist():
                    content = maxmind_zip_path.read(contained_filename)
                    self._parse_maxmind_content(content, 'maxmind',
                                                contained_filename)
            elif maxmind_path.endswith('.gz'):
                content = gzip.open(maxmind_path).read()
                self._parse_maxmind_content(content, 'maxmind', 'maxmind')
        self.database_cache.commit_changes()

    def import_maxmind_file(self, maxmind_path):
        self.database_cache.delete_assignments(maxmind_path)
        if not os.path.exists(maxmind_path):
            print("Unable to find %s." % maxmind_path)
            return
        content = open(maxmind_path, 'rb').read()
        self._parse_maxmind_content(content, maxmind_path, maxmind_path)
        self.database_cache.commit_changes()

    def _parse_maxmind_content(self, content, source_type, source_name):
        keys = ['start_str', 'end_str', 'start_num', 'end_num',
                'country_code', 'country_name']
        for line in content.decode('utf-8').split('\n'):
            if len(line.strip()) == 0 or line.startswith("#"):
                continue
            line = line.replace('"', '').replace(' ', '').strip()
            parts = line.split(',')
            entry = dict((k, v) for k, v in zip(keys, parts))
            start_num = int(entry['start_num'])
            end_num = int(entry['end_num'])
            country_code = str(entry['country_code'])
            start_ipaddr = ipaddr.ip_address(entry['start_str'])
            if isinstance(start_ipaddr, ipaddr.IPv4Address):
                num_type = 'ipv4'
            else:
                num_type = 'ipv6'
            self.database_cache.insert_assignment(
                start_num, end_num, num_type, country_code,
                source_type, source_name)

    def parse_rir_files(self, rir_urls=None):
        """ Parse locally cached RIR files and insert assignments to the
            local database cache, overwriting any existing RIR
            assignments. """
        if not rir_urls:
            rir_urls = self.RIR_URLS.split()
        self.database_cache.delete_assignments('rir')
        keys = "registry country_code type start value date status"
        for rir_url in rir_urls:
            rir_path = os.path.join(self.cache_dir,
                                    rir_url.split('/')[-1])
            if not os.path.exists(rir_path):
                print("Unable to find %s." % rir_path)
                continue
            for line in open(rir_path, 'r'):
                if line.startswith("#"):
                    continue
                entry = dict((k, v) for k, v in
                             zip(keys.split(), line.strip().split("|")))
                source_name = str(entry['registry'])
                country_code = str(entry['country_code'])
                if source_name.replace(
                        ".", "", 1).isdigit() or country_code == "*":
                    continue
                num_type = entry['type']
                if num_type == 'asn':
                    start_num = end_num = int(entry['start'])
                elif num_type == 'ipv4':
                    start_num = int(ipaddr.IPv4Address(entry['start']))
                    end_num = start_num + int(entry['value']) - 1
                elif num_type == 'ipv6':
                    network_str = entry['start'] + '/' + entry['value']
                    network_ipaddr = ipaddr.IPv6Network(network_str)
                    start_num = int(network_ipaddr.network_address)
                    end_num = int(network_ipaddr.broadcast_address)
                self.database_cache.insert_assignment(
                    start_num, end_num, num_type, country_code,
                    'rir', source_name)
        self.database_cache.commit_changes()
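
    # Illustration (assumed record, not from a real delegation file): a line
    # such as
    #     ripencc|FR|ipv4|2.0.0.0|1048576|20100712|allocated
    # is zipped against the keys above, so start_num becomes
    # int(IPv4Address('2.0.0.0')) and end_num start_num + 1048576 - 1,
    # i.e. the whole of 2.0.0.0/12.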

    def parse_lir_files(self, lir_urls=None):
        """ Parse locally cached LIR files and insert assignments to the
            local database cache, overwriting any existing LIR
            assignments. """
        if not lir_urls:
            lir_urls = self.LIR_URLS.split()
        self.database_cache.delete_assignments('lir')
        for lir_url in lir_urls:
            lir_path = os.path.join(self.cache_dir,
                                    lir_url.split('/')[-1])
            if not os.path.exists(lir_path):
                print("Unable to find %s." % lir_path)
                continue
            if lir_path.endswith('.gz'):
                lir_file = gzip.open(lir_path)
            else:
                lir_file = open(lir_path, 'rb')
            start_num, end_num, country_code, num_type = 0, 0, "", ""
            entry = False
            for line in lir_file:
                line = line.decode('utf-8', 'ignore').replace("\n", "")
                if line == "":
                    entry = False
                    start_num, end_num, country_code, num_type = 0, 0, "", ""
                elif not entry and "inetnum:" in line:
                    try:
                        line = line.replace("inetnum:", "").strip()
                        start_str = line.split("-")[0].strip()
                        end_str = line.split("-")[1].strip()
                        start_num = int(ipaddr.IPv4Address(start_str))
                        end_num = int(ipaddr.IPv4Address(end_str))
                        entry = True
                        num_type = 'ipv4'
                    except Exception as e:
                        if self.verbose:
                            print(repr(e), line)
                elif not entry and "inet6num:" in line:
                    try:
                        network_str = line.replace("inet6num:", "").strip()
                        network_ipaddr = ipaddr.IPv6Network(network_str)
                        start_num = int(network_ipaddr.network_address)
                        end_num = int(network_ipaddr.broadcast_address)
                        entry = True
                        num_type = 'ipv6'
                    except Exception as e:
                        if self.verbose:
                            print(repr(e), line)
                elif entry and "country:" in line:
                    country_code = line.replace("country:", "").strip()
                    self.database_cache.insert_assignment(
                        start_num, end_num, num_type, country_code,
                        'lir', lir_url)
                    entry = False
        self.database_cache.commit_changes()

    def parse_asn_description_file(self, asn_description_url=None):
        """ Parse locally cached ASN to Description mappings and insert
            mappings to the local database cache, overwriting any existing
            ASN to Name assignments. """
        if not asn_description_url:
            asn_description_url = self.ASN_DESCRIPTION_URL
        self.database_cache.delete_asn_descriptions()
        asn_description_path = os.path.join(
            self.cache_dir,
            asn_description_url.split('/')[-1])
        asn_descriptions = open(asn_description_path)
        source_name = 'cidr_report'
        skiplen = len('<a href="/cgi-bin/as-report?as=AS')
        for line in asn_descriptions:
            try:
                asn, _name = line[skiplen:].split('&view=2.0')
                description = _name.split('</a>')[1].strip()
                self.database_cache.insert_asn_description(asn, source_name,
                                                           description)
            except ValueError:
                pass
        self.database_cache.commit_changes()
        asn_descriptions.close()

    def parse_asn_assignment_files(self, asn_assignment_urls=None):
        if not asn_assignment_urls:
            asn_assignment_urls = self.ASN_ASSIGNMENT_URLS
        self.database_cache.delete_asn_assignments()
        for asn_assignment_url in asn_assignment_urls:
            asn_assignment_path = os.path.join(
                self.cache_dir,
                asn_assignment_url.split('/')[-1])
            if not os.path.exists(asn_assignment_path):
                print("Unable to find %s." % asn_assignment_path)
                continue
            if asn_assignment_path.endswith('.bz2'):
                b = bz2.BZ2File(asn_assignment_path)
                for line in b:
                    line = line.decode('utf-8', 'ignore')
                    if line.startswith("*"):
                        l = line.split()
                        netblock, path = l[1], l[6:-1]
                        nexthop, metric, locprf, weight = l[
                            2], l[3], l[4], l[5]
                        network = ipaddr.IPNetwork(netblock)
                        # XXX add support for other sources too
                        source_type = 'bgp_snapshot'
                        source_name = 'routeviews'

                        if isinstance(network, ipaddr.IPv4Network):
                            num_type = 'ipv4'
                        else:
                            num_type = 'ipv6'

                        self.database_cache.insert_asn_assignment(
                            int(network.network),
                            int(network.broadcast),
                            num_type,
                            path[-1],
                            source_type,
                            source_name)
        self.database_cache.commit_changes()
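
    # Illustration (assumed snapshot line): a routeviews entry looks roughly
    # like
    #     * 1.0.0.0/24  192.0.2.1  0  100  0  15169 i
    # so after split(), l[1] is the netblock and l[6:-1] the AS path;
    # path[-1] (the origin AS, '15169' here) is what gets stored for the
    # block.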


class Lookup(object):

    def __init__(self, cache_dir, database_cache, verbose=False):
        self.cache_dir = cache_dir
        self.database_cache = database_cache
        self.verbose = verbose
        self.map_co = None
        self.build_country_code_dictionary()

    def build_country_code_dictionary(self):
        """ Build a dictionary mapping country names to country codes from
            the cached ISO country code file, if present. """
        country_code_path = os.path.join(
            self.cache_dir,
            'country_names_and_code_elements_txt-temp.htm')
        if not os.path.exists(country_code_path):
            return
        self.map_co = {}
        for line in open(country_code_path):
            if line == "" or line.startswith("Country ") or ";" not in line:
                continue
            country_name, country_code = line.strip().split(";")
            country_name = ' '.join([part.capitalize() for part in
                                     country_name.split(" ")])
            self.map_co[country_name] = country_code

    def knows_country_names(self):
        return self.map_co is not None

    def get_name_from_country_code(self, cc_code):
        if not self.knows_country_names():
            return None
        country_name = [(key, value) for (key, value) in
                        list(self.map_co.items()) if value == cc_code]
        if len(country_name) > 0:
            return country_name[0][0]
        return None

    def get_country_code_from_name(self, country_name):
        """ Return the country code for a given country name. """
        if not self.knows_country_names():
            return None
        cc_code = [self.map_co[key] for key in list(self.map_co.keys()) if
                   key.upper().startswith(country_name.upper())]
        if len(cc_code) > 0:
            return cc_code[0]
        return None

    def lookup_ipv6_address(self, lookup_ipaddr):
        print("Reverse lookup for: " + str(lookup_ipaddr))
        for source_type in ['maxmind', 'rir', 'lir']:
            cc = self.database_cache.fetch_country_code(
                'ipv6', source_type, int(lookup_ipaddr))
            if cc:
                print(source_type.upper(), "country code:", cc)
                cn = self.get_name_from_country_code(cc)
                if cn:
                    print(source_type.upper(), "country name:", cn)

    def lookup_ipv4_address(self, lookup_ipaddr):
        print("Reverse lookup for: " + str(lookup_ipaddr))
        maxmind_cc = self.database_cache.fetch_country_code(
            'ipv4', 'maxmind', int(lookup_ipaddr))
        if maxmind_cc:
            print('MaxMind country code:', maxmind_cc)
            maxmind_cn = self.get_name_from_country_code(maxmind_cc)
            if maxmind_cn:
                print('MaxMind country name:', maxmind_cn)
        rir_cc = self.database_cache.fetch_country_code(
            'ipv4', 'rir', int(lookup_ipaddr))
        if rir_cc:
            print('RIR country code:', rir_cc)
            rir_cn = self.get_name_from_country_code(rir_cc)
            if rir_cn:
                print('RIR country name:', rir_cn)
        else:
            print('Not found in RIR db')
        lir_cc = self.database_cache.fetch_country_code(
            'ipv4', 'lir', int(lookup_ipaddr))
        if lir_cc:
            print('LIR country code:', lir_cc)
            lir_cn = self.get_name_from_country_code(lir_cc)
            if lir_cn:
                print('LIR country name:', lir_cn)
        if maxmind_cc and maxmind_cc != rir_cc:
            print("It appears that the RIR data conflicts with MaxMind's "
                  "data. MaxMind's data is likely closer to being "
                  "correct due to sub-delegation issues with LIR databases.")

    def lookup_ip_address(self, lookup_str):
        """ Return the country code and name for a given ip address. """
        try:
            lookup_ipaddr = ipaddr.ip_address(lookup_str)
            if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
                self.lookup_ipv4_address(lookup_ipaddr)
            elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
                self.lookup_ipv6_address(lookup_ipaddr)
            else:
                print(("Did not recognize '%s' as either IPv4 or IPv6 "
                       "address." % lookup_str))
        except ValueError:
            print("'%s' is not a valid IP address." % lookup_str)

    def asn_lookup(self, asn):
        asn_cc = self.database_cache.fetch_country_code('asn', 'rir', asn)
        if asn_cc:
            print("AS country code: %s" % asn_cc)
            asn_cn = self.get_name_from_country_code(asn_cc)
            if asn_cn:
                print("AS country name: %s" % asn_cn)
        else:
            print("AS%s not found!" % asn)

    def fetch_rir_blocks_by_country(self, request, country):
        result = []
        for (start_num, end_num) in \
                self.database_cache.fetch_assignments(request, country):
            if request == "ipv4" or request == "ipv6":
                start_ipaddr = ipaddr.ip_address(start_num)
                end_ipaddr = ipaddr.ip_address(end_num)
                result += [str(x) for x in
                           ipaddr.summarize_address_range(
                               start_ipaddr, end_ipaddr)]
            else:
                result.append(str(start_num))
        return result

    def lookup_countries_in_different_source(self, first_country_code):
        """ Look up all assignments matching the given country code, then
            look up to which country code(s) the same number ranges are
            assigned in other source types. Print out the result showing
            similarities and differences. """
        print(("\nLegend:\n"
               " '<' = found assignment range with country code '%s'\n"
               " '>' = overlapping assignment range with same country code\n"
               " '*' = overlapping assignment range, first conflict\n"
               " '#' = overlapping assignment range, second conflict and "
               "beyond\n ' ' = neighboring assignment range") % (
                   first_country_code, ))
        results = self.database_cache.fetch_country_blocks_in_other_sources(
            first_country_code)
        prev_first_source_type = ''
        prev_first_start_num = -1
        cur_second_country_codes = []
        for (first_source_type, first_start_num, first_end_num,
                second_source_type, second_start_num, second_end_num,
                second_country_code, num_type) in results:
            if first_source_type != prev_first_source_type:
                print("\nAssignments in '%s':" % (first_source_type, ))
                prev_first_source_type = first_source_type
            if first_start_num != prev_first_start_num:
                cur_second_country_codes = []
                print("")
                prev_first_start_num = first_start_num
            marker = ' '
            if second_end_num >= first_start_num and \
                    second_start_num <= first_end_num:
                if first_country_code != second_country_code and \
                        second_country_code not in cur_second_country_codes:
                    cur_second_country_codes.append(second_country_code)
                if first_source_type == second_source_type:
                    marker = '<'
                elif len(cur_second_country_codes) == 0:
                    marker = '>'
                elif len(cur_second_country_codes) == 1:
                    marker = '*'
                else:
                    marker = '#'
            if num_type.startswith("ip") and \
                    second_start_num == second_end_num:
                second_range = "%s" % (ipaddr.ip_address(second_start_num), )
            elif num_type.startswith("ip") and \
                    second_start_num < second_end_num:
                second_range = "%s-%s" % (ipaddr.ip_address(second_start_num),
                                          ipaddr.ip_address(second_end_num))
            elif second_start_num < second_end_num:
                second_range = "AS%d-%d" % (second_start_num, second_end_num)
            else:
                second_range = "AS%d" % (second_start_num, )
            print("%1s %s %s %s" % (marker, second_country_code, second_range,
                                    second_source_type, ))

    def _get_network_string_from_range(self, end, start, bits=32):
        start, end = int(start, 16), int(end, 16)
        netbits = bits - int(log(end - start, 2))
        return ipaddr.IPNetwork("%s/%d" % (ipaddr.IPAddress(start), netbits))
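
    # Illustration (assumed values): a row holding start_hex '00a000000' and
    # next_start_hex '00b000000' gives end - start == 2**24, so netbits is
    # 32 - 24 == 8 and the method reconstructs the network 10.0.0.0/8.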

    def lookup_org_by_ip(self, lookup_str):
        """ Return the ASN and AS Description by IP """
        try:
            lookup_ipaddr = ipaddr.IPAddress(lookup_str)
            if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
                num_type = 'ipv4'
                len_bits = 32
            elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
                num_type = 'ipv6'
                len_bits = 128
            rs = self.database_cache.fetch_org_by_ip_address(
                lookup_ipaddr, num_type)
            for r in rs:
                network = self._get_network_string_from_range(
                    r[3], r[2], bits=len_bits)
                print("%s in %s announced by AS%s - %s" %
                      (lookup_str, network, r[0], r[1]))
        except ValueError:
            print("'%s' is not a valid IP address." % lookup_str)
        except TypeError:
            print("Did not find any matching announcements containing %s." %
                  lookup_str)

    def lookup_org_by_range(self, start_range, end_range):
        output_str = "%s announced by AS%s - %s"
        try:
            a = ipaddr.IPAddress(start_range)
            b = ipaddr.IPAddress(end_range)
            if isinstance(a, ipaddr.IPv4Address) and isinstance(
                    b, ipaddr.IPv4Address):
                num_type = 'ipv4'
                len_bits = 32
            elif isinstance(a, ipaddr.IPv6Address) and (
                    isinstance(b, ipaddr.IPv6Address)):
                num_type = 'ipv6'
                len_bits = 128
            else:
                raise ValueError
            rs = self.database_cache.fetch_org_by_ip_range(
                min(a, b), max(a, b), num_type)
            for r in rs:
                network = self._get_network_string_from_range(
                    r[3], r[2], bits=len_bits)
                print(output_str % (network, r[0], r[1]))
        except ValueError:
            print("%s %s is not a valid IP range." % (start_range, end_range))
        except TypeError:
            print("Did not find any matching announcements in range %s %s." %
                  (start_range, end_range))


def split_callback(option, opt, value, parser):
    split_value = value.split(':')
    setattr(parser.values, option.dest, split_value[0])
    if len(split_value) > 1 and split_value[1] != '':
        setattr(parser.values, 'type_filter', split_value[1])
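
# Illustration (assumed invocation): for "-t mm:ipv4", optparse hands
# "mm:ipv4" to split_callback, which stores "mm" under the option's dest
# and "ipv4" under 'type_filter', so the lookup code in main() can restrict
# itself to a single number type.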


def normalize_country_code(country_code):
    """ Normalize country codes a bit by making capitalization consistent and
        removing trailing comments (and other words). """
    if not country_code:
        return country_code
    country_code = re.match(r'^(\w+)', country_code).group(1)
    return country_code.upper()
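
# For example, normalize_country_code("de # comment") keeps only the leading
# word and upper-cases it, returning "DE"; falsy input is returned unchanged.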
1104 """ Where the magic starts. """
1105 usage
= ("Usage: %prog [options]\n\n"
1106 "Example: %prog -v -t mm")
1107 parser
= optparse
.OptionParser(usage
)
1108 parser
.add_option("-v", "--verbose", action
="store_true",
1109 dest
="verbose", help="be verbose", default
=False)
1110 parser
.add_option("-c", "--cache-dir", action
="store", dest
="dir",
1111 help="set cache directory [default: %default]",
1112 default
=str(os
.path
.expanduser('~')) + "/.blockfinder/")
1113 parser
.add_option("--user-agent", action
="store", dest
="ua",
1114 help=('provide a User-Agent which will be used when '
1115 'fetching delegation files [default: "%default"]'),
1116 default
=("Mozilla/5.0 (Windows NT 6.1; rv:17.0) "
1117 "Gecko/20100101 Firefox/17.0"))
1118 parser
.add_option("-x", "--hack-the-internet", action
="store_true",
1119 dest
="hack_the_internet", help=optparse
.SUPPRESS_HELP
)
    group = optparse.OptionGroup(
        parser,
        "Cache modes",
        "Pick at most one of these modes to initialize or update "
        "the local cache. May not be combined with lookup modes.")
    group.add_option(
        "--init-maxmind",
        action="store_true",
        dest="init_maxmind",
        help="initialize or update MaxMind GeoIP database")
    group.add_option(
        "--reload-maxmind",
        action="store_true",
        dest="reload_maxmind",
        help=("update cache from existing MaxMind GeoIP database"))
    group.add_option(
        "--import-maxmind",
        action="store",
        dest="import_maxmind",
        metavar="FILE",
        help=("import the specified MaxMind GeoIP database file into "
              "the database cache using its file name as source "
              "name"))
    group.add_option("-i", "--init-rir",
                     action="store_true", dest="init_del",
                     help="initialize or update delegation information")
    group.add_option(
        "-d", "--reload-rir",
        action="store_true",
        dest="reload_del",
        help="use existing delegation files to update the database")
    group.add_option(
        "--init-lir",
        action="store_true",
        dest="init_lir",
        help=("initialize or update lir information; can take up to "
              "5 minutes"))
    group.add_option(
        "-z", "--reload-lir",
        action="store_true",
        dest="reload_lir",
        help=("use existing lir files to update the database; can "
              "take up to 5 minutes"))
    group.add_option(
        "--download-cc",
        action="store_true",
        dest="download_cc",
        help="download country codes file")
    group.add_option(
        "-e", "--erase-cache",
        action="store_true",
        dest="erase_cache",
        help="erase the local database cache")
    group.add_option(
        "--init-asn-descriptions",
        action="store_true",
        dest="init_asn_descriptions",
        help=("initialize or update asn description information"))
    group.add_option(
        "--reload-asn-descriptions",
        action="store_true",
        dest="reload_asn_descriptions",
        help=("use existing asn descriptions to update database"))
    group.add_option(
        "--init-asn-assignments",
        action="store_true",
        dest="init_asn_assignments",
        help=("initialize or update asn assignment information"))
    group.add_option(
        "--reload-asn-assignments",
        action="store_true",
        dest="reload_asn_assignments",
        help=("use existing asn assignments to update database"))
    parser.add_option_group(group)
    group = optparse.OptionGroup(
        parser, "Lookup modes",
        "Pick at most one of these modes to look up data in the "
        "local cache. May not be combined with cache modes.")
    group.add_option(
        "--ipv4",
        action="store",
        dest="ipv4",
        help=("look up country code and name for the specified IPv4 "
              "address"))
    group.add_option(
        "--ipv6",
        action="store",
        dest="ipv6",
        help=("look up country code and name for the specified IPv6 "
              "address"))
    group.add_option(
        "--asn",
        action="store",
        dest="asn",
        help="look up country code and name for the specified ASN")
    group.add_option(
        "-t",
        action="callback",
        callback=split_callback,
        metavar="CC[:type]",
        type="str",
        dest="cc",
        help=("look up all allocations (or only those for number "
              "type 'ipv4', 'ipv6', or 'asn' if provided) in the "
              "delegation cache for the specified two-letter country "
              "code"))
    group.add_option(
        "--country-name",
        action="callback",
        callback=split_callback,
        metavar="CN[:type]",
        type="str",
        dest="cn",
        help=("look up all allocations (or only those for number "
              "type 'ipv4', 'ipv6', or 'asn' if provided) in the "
              "delegation cache for the specified full country "
              "name"))
    group.add_option(
        "--compare",
        action="store",
        dest="compare",
        help=("compare assignments to the specified country code "
              "with overlapping assignments in other data "
              "sources; can take some time and produce some "
              "long output"))
    group.add_option(
        "--what-cc",
        action="store",
        dest="what_cc",
        help=("look up country name for specified country code"))
    group.add_option(
        "--lookup-org-by-ip",
        action="store",
        dest="lookup_org_by_ip",
        help=("look up ASN and AS Description for an IP address"))
    group.add_option(
        "--lookup-org-by-range",
        action="store_true",
        dest="lookup_org_by_range",
        help=("look up announced networks in a range of addresses; "
              "requires --range-start and --range-end to be set"))
    group.add_option(
        "--range-start",
        action="store",
        dest="range_start",
        help=("Specify the start of a range of addresses"))
    group.add_option(
        "--range-end",
        action="store",
        dest="range_end",
        help=("Specify the end of a range of addresses"))
    parser.add_option_group(group)
    group = optparse.OptionGroup(parser, "Export modes")
    group.add_option(
        "--export",
        action="store_true",
        dest="export",
        help=("export the lookup database to GeoIPCountryWhois.csv and "
              "v6.csv files in the format used to build the debian "
              "package geoip-database"))
    group.add_option(
        "--geoip-v4-file",
        action="store",
        dest="geoip_v4_filename",
        help=("The filename to write the IPv4 GeoIP dataset to"))
    group.add_option(
        "--geoip-v6-file",
        action="store",
        dest="geoip_v6_filename",
        help=("The filename to write the IPv6 GeoIP dataset to"))
    group.add_option(
        "--geoip-asn-file",
        action="store",
        dest="geoip_asn_filename",
        help=("The filename to write the IPv4 GeoIP ASNum dataset to"))
    parser.add_option_group(group)

    group = optparse.OptionGroup(parser, "Network modes")
    (options, args) = parser.parse_args()
    if options.hack_the_internet:
        print("all your bases are belong to us!")
        sys.exit(666)
    options_dict = vars(options)
    modes = 0
    for mode in ["init_maxmind", "reload_maxmind", "import_maxmind",
                 "init_del", "init_lir", "reload_del", "reload_lir",
                 "download_cc", "erase_cache", "ipv4", "ipv6", "asn",
                 "cc", "cn", "compare", "what_cc", "init_asn_descriptions",
                 "reload_asn_descriptions", "init_asn_assignments",
                 "reload_asn_assignments", "lookup_org_by_ip",
                 "lookup_org_by_range", "export"]:
        if mode in options_dict and options_dict.get(mode):
            modes += 1
    if modes > 1:
        parser.error("only 1 cache or lookup mode allowed")
    elif modes == 0:
        parser.error("must provide 1 cache or lookup mode")
    database_cache = DatabaseCache(options.dir, options.verbose)
    if options.erase_cache:
        database_cache.erase_database()
        return
    if not database_cache.connect_to_database():
        print("Could not connect to database.")
        print("You may need to erase it using -e and then reload it "
              "using -d/-z. Exiting.")
        sys.exit(1)
    database_cache.set_db_version()
    downloader_parser = DownloaderParser(options.dir, database_cache,
                                         options.ua)
    lookup = Lookup(options.dir, database_cache)
    if options.ipv4 or options.ipv6 or options.asn or options.cc \
            or options.cn or options.compare:
        if downloader_parser.check_rir_file_mtimes():
            print("Your cached RIR files are older than 24 hours; you "
                  "probably want to update them.")
    if options.asn:
        lookup.asn_lookup(options.asn)
    elif options.lookup_org_by_ip:
        lookup.lookup_org_by_ip(options.lookup_org_by_ip)
    elif options.lookup_org_by_range:
        if not (options.range_start and options.range_end):
            print("You must specify the start and end addresses; "
                  "see --range-start and --range-end")
        else:
            lookup.lookup_org_by_range(options.range_start, options.range_end)
    elif options.ipv4:
        lookup.lookup_ip_address(options.ipv4)
    elif options.ipv6:
        lookup.lookup_ip_address(options.ipv6)
    elif options.cc or options.cn or options.what_cc:
        country = None
        if options.cc:
            country = options.cc.upper()
        elif not lookup.knows_country_names():
            print("Need to download country codes first before looking "
                  "up countries by name.")
        elif options.what_cc:
            country = options.what_cc.upper()
            country_name = lookup.get_name_from_country_code(country)
            if country_name:
                print(("Hmm...%s? That would be %s."
                       % (options.what_cc, country_name)))
            else:
                print(("Hmm, %s? We're not sure either. Are you sure that's "
                       "a country code?" % options.what_cc))
            return
        else:
            country = lookup.get_country_code_from_name(options.cn)
            if not country:
                print("It appears your search did not match a country.")
        if country:
            types = ["ipv4", "ipv6", "asn"]
            if hasattr(options, 'type_filter') and \
                    options.type_filter.lower() in types:
                types = [options.type_filter.lower()]
            for request in types:
                print("\n".join(lookup.fetch_rir_blocks_by_country(
                    request, country)))
    elif options.compare:
        print("Comparing assignments with overlapping assignments in other "
              "data sources...")
        lookup.lookup_countries_in_different_source(options.compare)
    elif options.init_maxmind or options.reload_maxmind:
        if options.init_maxmind:
            print("Downloading Maxmind GeoIP files...")
            downloader_parser.download_maxmind_files()
        print("Importing Maxmind GeoIP files...")
        downloader_parser.parse_maxmind_files()
    elif options.import_maxmind:
        print("Importing Maxmind GeoIP files...")
        downloader_parser.import_maxmind_file(options.import_maxmind)
    elif options.init_del or options.reload_del:
        if options.init_del:
            print("Downloading RIR files...")
            downloader_parser.download_rir_files()
            print("Verifying RIR files...")
            downloader_parser.verify_rir_files()
        print("Importing RIR files...")
        downloader_parser.parse_rir_files()
    elif options.init_lir or options.reload_lir:
        if options.init_lir:
            print("Downloading LIR delegation files...")
            downloader_parser.download_lir_files()
        print("Importing LIR files...")
        downloader_parser.parse_lir_files()
    elif options.download_cc:
        print("Downloading country code file...")
        downloader_parser.download_country_code_file()
    elif options.init_asn_descriptions or options.reload_asn_descriptions:
        if options.init_asn_descriptions:
            print("Downloading ASN Descriptions...")
            downloader_parser.download_asn_description_file()
        print("Importing ASN Descriptions...")
        downloader_parser.parse_asn_description_file()
    elif options.init_asn_assignments or options.reload_asn_assignments:
        if options.init_asn_assignments:
            print("Downloading ASN Assignments...")
            downloader_parser.download_asn_assignment_files()
        print("Importing ASN Assignments...")
        downloader_parser.parse_asn_assignment_files()
    elif options.export:
        v4_file = options.geoip_v4_filename or "GeoIPCountryWhois.csv"
        v6_file = options.geoip_v6_filename or "v6.csv"
        asn_file = options.geoip_asn_filename or "GeoIPASNum.csv"
        print("Exporting GeoIP IPv4 to %s" % v4_file)
        database_cache.export_geoip(lookup, v4_file, 'ipv4')
        print("Exporting GeoIP IPv6 to %s" % v6_file)
        database_cache.export_geoip(lookup, v6_file, 'ipv6')
        print("Exporting GeoIP IPv4 ASNum to %s" % asn_file)
        database_cache.export_asn(asn_file, 'ipv4')
        # IPv6 ASN export is not yet supported:
        # print("Exporting GeoIP IPv6 ASNum to %s" % asn_file)
        # database_cache.export_geoip(asn_file, 'ipv6')
    database_cache.commit_and_close_database()
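

# Typical invocations (illustrative; uses only options defined above):
#   blockfinder -i        # download and import RIR delegation files
#   blockfinder -t mm     # list all blocks delegated to country code MM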

if __name__ == "__main__":
    main()