2 # -*- coding: utf-8 -*-
4 # For the people of Smubworld!
17 if sys
.version_info
[0] >= 3:
19 import ipaddress
as ipaddr
20 from urllib
.request
import (urlopen
, Request
)
21 from urllib
.error
import URLError
24 import ConfigParser
as configparser
25 from urllib2
import (urlopen
, Request
, URLError
)
26 from embedded_ipaddr
import ipaddr
27 ipaddr
.ip_address
= ipaddr
.IPAddress
# True when running under native Windows (console/progress handling differs there).
is_win32 = (sys.platform == "win32")

# Package metadata.
__program__ = 'blockfinder'
__url__ = 'https://github.com/ioerror/blockfinder/'
__author__ = 'Jacob Appelbaum <jacob@appelbaum.net>, David <db@d1b.org>'
__copyright__ = 'Copyright (c) 2010'
__license__ = 'See LICENSE for licensing information'
__version__ = '3.1415'
39 from future
import antigravity
45 def __init__(self
, cache_dir
, verbose
=False):
46 self
.cache_dir
= cache_dir
47 self
.verbose
= verbose
50 self
.db_version
= "0.0.4"
51 self
.db_path
= os
.path
.join(self
.cache_dir
+ "sqlitedb")
def erase_database(self):
    """ Delete the on-disk SQLite cache file, if one exists. """
    db_file = self.db_path
    if os.path.exists(db_file):
        os.remove(db_file)
58 def connect_to_database(self
):
59 """ Connect to the database cache, possibly after creating it if
60 it doesn't exist yet, or after making sure an existing
61 database cache has the correct version. Return True if a
62 connection could be established, False otherwise. """
63 if not os
.path
.exists(self
.cache_dir
):
65 print("Initializing the cache directory...")
66 os
.mkdir(self
.cache_dir
)
67 if os
.path
.exists(self
.db_path
):
68 cache_version
= self
.get_db_version()
70 cache_version
= "0.0.1"
71 if cache_version
!= self
.db_version
:
72 print(("The existing database cache uses version %s, "
73 "not the expected %s." % (cache_version
,
76 self
.conn
= sqlite3
.connect(self
.db_path
)
77 self
.cursor
= self
.conn
.cursor()
78 self
.create_assignments_table()
79 self
.create_asn_description_table()
80 self
.create_asn_assignments_table()
83 def __get_default_config_file_obj(self
):
85 file_path
= os
.path
.join(self
.cache_dir
, 'db.cfg')
86 if not os
.path
.exists(file_path
):
88 return open(file_path
, open_flags
)
90 def _get_db_config(self
, file_obj
=None):
91 """ Return the database configuration object from the provided
92 file_obj if provided, otherwise from the default database
93 configuration file. """
95 file_obj
= self
.__get
_default
_config
_file
_obj
()
96 config
= configparser
.SafeConfigParser()
97 config
.readfp(file_obj
)
101 def set_db_version(self
, file_obj
=None):
102 """ Set the database version string in the config file. """
104 file_obj
= self
.__get
_default
_config
_file
_obj
()
105 config
= self
._get
_db
_config
()
106 if not config
.has_section('db'):
107 config
.add_section('db')
108 config
.set('db', 'version', self
.db_version
)
109 config
.write(file_obj
)
112 def get_db_version(self
):
113 """ Read and return the database version string from the config
115 config
= self
._get
_db
_config
()
116 if not config
.has_section('db'):
118 return config
.get('db', 'version')
120 def commit_and_close_database(self
):
124 def create_assignments_table(self
):
125 """ Create the assignments table that stores all assignments from
126 IPv4/IPv6/ASN to country code. Blocks are stored as first hex
127 of and first hex after the assignment. Numbers are stored
128 as hex strings, because SQLite's INTEGER type only holds up to
129 63 unsigned bits, which is not enough to store a /64 IPv6
130 block. Hex strings have leading zeros, with IPv6 addresses
131 being 33 hex characters long and IPv4 addresses and ASN being
132 9 hex characters long. The first number after an assignment
133 range is stored instead of the last number in the range to
134 facilitate comparisons with neighboring ranges. """
135 sql
= ('CREATE TABLE IF NOT EXISTS assignments(start_hex TEXT, '
136 'next_start_hex TEXT, num_type TEXT, country_code TEXT, '
137 'source_type TEXT, source_name TEXT)')
138 self
.cursor
.execute(sql
)
141 def create_asn_description_table(self
):
142 """ Create the assignments table that stores all the descriptions
143 associated with ASNs. """
144 sql
= ('CREATE TABLE IF NOT EXISTS asn_descriptions(as_num INT, '
145 'source_name TEXT, description TEXT)')
146 self
.cursor
.execute(sql
)
147 sql
= ('CREATE INDEX IF NOT EXISTS DescriptionsByASN ON '
148 'asn_descriptions ( as_num )')
149 self
.cursor
.execute(sql
)
152 def create_asn_assignments_table(self
):
153 """ Create the assignments table that stores the assignments from
155 #XXX: IPv6 not yet supported. (Not available from routeviews?)
156 sql
= ('CREATE TABLE IF NOT EXISTS asn_assignments(start_hex TEXT, '
157 'next_start_hex TEXT, num_type TEXT, as_num INT, '
158 'source_type TEXT, source_name TEXT, PRIMARY KEY(start_hex, '
160 self
.cursor
.execute(sql
)
161 sql
= ('CREATE INDEX IF NOT EXISTS ASNEntriesByStartHex on '
162 'asn_assignments ( start_hex )')
163 self
.cursor
.execute(sql
)
def delete_assignments(self, source_type):
    """ Remove every cached assignment that came from the given
        source type ("rir", "lir", etc.). """
    delete_sql = 'DELETE FROM assignments WHERE source_type = ?'
    self.cursor.execute(delete_sql, (source_type, ))
def delete_asn_descriptions(self):
    """ Drop every cached ASN description row. """
    delete_sql = 'DELETE FROM asn_descriptions'
    self.cursor.execute(delete_sql)
def delete_asn_assignments(self):
    """ Drop every cached BGP netblock-to-AS row. """
    delete_sql = 'DELETE FROM asn_assignments'
    self.cursor.execute(delete_sql)
def insert_assignment(self, start_num, end_num, num_type,
                      country_code, source_type, source_name):
    """ Insert one assignment row into the database cache, without
    committing after the insertion.

    Numbers are stored as zero-padded hex strings (33 chars for IPv6,
    9 chars for IPv4/ASN) because SQLite INTEGERs cannot hold a full
    IPv6 range. The second column stores the first number *after* the
    assignment (half-open range) to ease neighbor comparisons. """
    sql = ('INSERT INTO assignments (start_hex, next_start_hex, '
           'num_type, country_code, source_type, source_name) '
           'VALUES (?, ?, ?, ?, ?, ?)')
    if num_type == 'ipv6':
        start_hex = '%033x' % start_num
        next_start_hex = '%033x' % (end_num + 1)
    else:
        # IPv4 addresses and ASNs both fit in 9 hex digits.
        start_hex = '%09x' % start_num
        next_start_hex = '%09x' % (end_num + 1)
    self.cursor.execute(sql, (start_hex, next_start_hex, num_type,
                              country_code, source_type, source_name))
201 def insert_asn_description(self
, asn
, source_name
, description
):
202 sql
= ('INSERT INTO asn_descriptions (as_num, source_name, description) '
204 self
.cursor
.execute(sql
, (asn
, source_name
, unicode(description
)))
def insert_asn_assignment(self, start_num, end_num, num_type, asn,
                          source_type, source_name):
    """ Insert one netblock-to-AS row into the database cache, without
    committing after the insertion.

    Duplicate primary keys are silently skipped via INSERT OR IGNORE.
    Numbers are stored as zero-padded hex strings (33 chars for IPv6,
    9 chars otherwise); the second column holds the first number
    *after* the assignment (half-open range). """
    # XXX: This is sqlite specific syntax
    sql = ('INSERT OR IGNORE INTO asn_assignments (start_hex, '
           'next_start_hex, num_type, as_num, source_type, source_name) '
           'VALUES (?, ?, ?, ?, ?, ?)')
    if num_type == 'ipv6':
        start_hex = '%033x' % start_num
        next_start_hex = '%033x' % (end_num + 1)
    else:
        # IPv4 addresses fit in 9 hex digits.
        start_hex = '%09x' % start_num
        next_start_hex = '%09x' % (end_num + 1)
    self.cursor.execute(sql, (start_hex, next_start_hex, num_type, asn,
                              source_type, source_name))
221 def commit_changes(self
):
222 """ Commit changes, e.g., after inserting assignments into the
226 def fetch_assignments(self
, num_type
, country_code
):
227 """ Fetch all assignments from the database cache matching the
228 given number type ("asn", "ipv4", or "ipv6") and country code.
229 The result is a sorted list of tuples containing (start_num,
231 sql
= ('SELECT start_hex, next_start_hex FROM assignments '
232 'WHERE num_type = ? AND country_code = ? '
233 'ORDER BY start_hex')
234 self
.cursor
.execute(sql
, (num_type
, country_code
))
236 for row
in self
.cursor
:
237 result
.append((long(row
[0], 16), long(row
[1], 16) - 1))
240 def fetch_country_code(self
, num_type
, source_type
, lookup_num
):
241 """ Fetch the country code from the database cache that is
242 assigned to the given number (e.g., IPv4 address in decimal
243 notation), number type (e.g., "ipv4"), and source type (e.g.,
245 sql
= ('SELECT country_code FROM assignments WHERE num_type = ? '
246 'AND source_type = ? AND start_hex <= ? '
247 'AND next_start_hex > ?')
248 if num_type
== 'ipv6':
249 lookup_hex
= '%033x' % long(lookup_num
)
251 lookup_hex
= '%09x' % long(lookup_num
)
252 self
.cursor
.execute(sql
, (num_type
, source_type
, lookup_hex
,
254 row
= self
.cursor
.fetchone()
258 def fetch_country_blocks_in_other_sources(self
, first_country_code
):
259 """ Fetch all assignments matching the given country code, then look
260 up to which country code(s) the same number ranges are assigned in
261 other source types. Return 8-tuples containing (1) first source
262 type, (2) first and (3) last number of the assignment in the first
263 source type, (4) second source type, (5) first and (6) last number
264 of the assignment in the second source type, (7) country code in
265 the second source type, and (8) number type. """
266 sql
= ('SELECT first.source_type, first.start_hex, '
267 'first.next_start_hex, second.source_type, '
268 'second.start_hex, second.next_start_hex, '
269 'second.country_code, first.num_type '
270 'FROM assignments AS first '
271 'JOIN assignments AS second '
272 'WHERE first.country_code = ? '
273 'AND first.start_hex <= second.next_start_hex '
274 'AND first.next_start_hex >= second.start_hex '
275 'AND first.num_type = second.num_type '
276 'ORDER BY first.source_type, first.start_hex, '
277 'second.source_type, second.start_hex')
278 self
.cursor
.execute(sql
, (first_country_code
, ))
280 for row
in self
.cursor
:
281 result
.append((str(row
[0]), long(row
[1], 16),
282 long(row
[2], 16) - 1, str(row
[3]), long(row
[4], 16),
283 long(row
[5], 16) - 1, str(row
[6]), str(row
[7])))
286 def fetch_org_by_ip_address(self
, lookup_str
, num_type
):
287 if num_type
== 'ipv4':
288 lookup_hex
= '%09x' % long(int(lookup_str
))
290 lookup_hex
= '%033x' % long(int(lookup_str
))
291 sql
= ('SELECT asn_descriptions.as_num, asn_descriptions.description, '
292 'asn_assignments.start_hex, asn_assignments.next_start_hex '
293 'FROM asn_descriptions JOIN asn_assignments ON '
294 'asn_assignments.as_num = asn_descriptions.as_num '
295 'WHERE num_type = ? AND start_hex <= ? AND next_start_hex > ?')
296 self
.cursor
.execute(sql
, (num_type
, lookup_hex
, lookup_hex
))
297 row
= self
.cursor
.fetchall()
301 def fetch_org_by_ip_range(self
, lookup_start
, lookup_end
, num_type
):
302 if num_type
== 'ipv4':
303 lookup_start_hex
= '%09x' % long(int(lookup_start
))
304 lookup_end_hex
= '%09x' % long(int(lookup_end
))
306 lookup_start_hex
= '%033x' % long(int(lookup_start
))
307 lookup_end_hex
= '%033x' % long(int(lookup_end
))
309 sql
= ('SELECT asn_descriptions.as_num, asn_descriptions.description, '
310 'asn_assignments.start_hex, asn_assignments.next_start_hex '
311 'FROM asn_descriptions JOIN asn_assignments ON '
312 'asn_assignments.as_num = asn_descriptions.as_num '
313 'WHERE num_type = ? AND start_hex >= ? AND next_start_hex <= ?')
314 self
.cursor
.execute(sql
, (num_type
, lookup_start_hex
, lookup_end_hex
))
315 row
= self
.cursor
.fetchall()
319 def _concatenate_and_write(self
, records
, write_function
=None, record_filter
=None, bits
=32):
323 start_hex
, next_start_hex
, record
= \
324 long(row
[0], 16), long(row
[1], 16), str(row
[2])
325 nb
= bits
- int(log(next_start_hex
- start_hex
, 2))
326 net
= ipaddr
.IPNetwork("%s/%d" %
327 (ipaddr
.IPAddress(start_hex
),nb
))
328 if callable(record_filter
):
329 record
= record_filter(record
)
333 # Concatenate adjacent blocks of the same country
334 if netblocks
and netblocks
[-1][1] == record
:
335 pn
= netblocks
[-1][0]
336 nb
= bits
- int(log(int(net
.network
) + \
337 int(net
.numhosts
) - int(pn
.network
), 2))
338 netblocks
[-1] = (ipaddr
.IPNetwork("%s/%d" % \
339 (pn
.network
, nb
)), record
)
341 # if the adjacent blocks aren't the same country,
342 # write the last block out to csv and add the new block
343 # to the list for possible concatenation
345 prev_n
,prev_record
= netblocks
.pop()
347 write_function(prev_n
,prev_record
)
348 netblocks
.append((net
,record
))
350 # this is the base case
352 netblocks
.append((net
,record
))
354 def export_asn(self
, filename
, num_type
):
355 """ Export assignments to the CSV format used to build the geoip-database asn lookup """
356 sql
= ('SELECT start_hex, next_start_hex, as_num '
357 'FROM asn_assignments WHERE num_type = ? ORDER BY start_hex')
358 self
.cursor
.execute(sql
, (num_type
,))
360 f
= open(filename
, 'w')
362 print("Unable to open %s" % filename
)
365 def write_csv_line(network
, asn
):
367 f
.write(""""%s","%s","%d","%d","%s"\n""" % (network
.network
,
368 network
.broadcast
, int(network
.network
),
369 int(network
.broadcast
), asn
))
370 if num_type
== 'ipv6': ip_bits
= 128
371 elif num_type
== 'ipv4': ip_bits
= 32
374 self
._concatenate
_and
_write
(self
.cursor
, write_function
=write_csv_line
,
378 def export_geoip(self
, lookup
, filename
, num_type
):
379 """ Export assignments to the CSV format used to build the
380 geoip-database package """
382 sql
= ('SELECT start_hex, next_start_hex, country_code '
383 'FROM assignments WHERE num_type = ? ORDER BY start_hex')
384 self
.cursor
.execute(sql
, (num_type
,))
387 f
= open(filename
, 'w')
389 print("Unable to open %s" % filename
)
392 def write_csv_line(network
, country_code
):
393 country_name
= lookup
.get_name_from_country_code(country_code
)
395 country_name
= country_name
.split("#")[0].strip() #Drop comments
396 f
.write(""""%s","%s","%d","%d","%s","%s"\n""" % (network
.network
,
397 network
.broadcast
, int(network
.network
),
398 int(network
.broadcast
), country_code
, country_name
))
400 if num_type
== 'ipv6': ip_bits
= 128
401 elif num_type
== 'ipv4': ip_bits
= 32
404 self
._concatenate
_and
_write
(self
.cursor
, write_function
=write_csv_line
,
405 record_filter
=str.upper
, bits
=ip_bits
)
408 class DownloaderParser
:
409 def __init__(self
, cache_dir
, database_cache
, user_agent
,
411 self
.cache_dir
= cache_dir
412 self
.database_cache
= database_cache
413 self
.user_agent
= user_agent
414 self
.verbose
= verbose
417 http://geolite.maxmind.com/download/geoip/database/GeoIPCountryCSV.zip
418 http://geolite.maxmind.com/download/geoip/database/GeoIPv6.csv.gz
422 ftp://ftp.arin.net/pub/stats/arin/delegated-arin-extended-latest
423 ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest
424 ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest
425 ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
426 ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest
430 ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
431 ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz
# Semicolon-separated ISO country names/codes list.
COUNTRY_CODE_URL = "http://www.iso.org/iso/home/standards/country_codes/country_names_and_code_elements_txt-temp.htm"

# ASN -> description report from the CIDR Report.
ASN_DESCRIPTION_URL = "http://www.cidr-report.org/as2.0/autnums.html"
438 ASN_ASSIGNMENT_URLS
= """
439 http://archive.routeviews.org/oix-route-views/oix-full-snapshot-latest.dat.bz2
def download_maxmind_files(self):
    """ Download all MaxMind geolocation files to the cache directory.

    (Docstring previously said "LIR delegation urls" — a copy-paste
    error from download_lir_files.) """
    for maxmind_url in self.MAXMIND_URLS.split():
        self._download_to_cache_dir(maxmind_url)
def download_rir_files(self):
    """ Fetch every RIR delegation file plus its md5 checksum file. """
    for delegation_url in self.RIR_URLS.split():
        checksum_url = delegation_url + '.md5'
        self._download_to_cache_dir(delegation_url)
        self._download_to_cache_dir(checksum_url)
def download_lir_files(self):
    """ Fetch each LIR delegation file into the local cache. """
    for url in self.LIR_URLS.split():
        self._download_to_cache_dir(url)
def download_country_code_file(self):
    """ Retrieve the semicolon-separated open country codes file and
        store it in the local cache directory. """
    self._download_to_cache_dir(self.COUNTRY_CODE_URL)
def download_asn_description_file(self):
    """ Retrieve the latest ASN-to-name report and store it in the
        local cache directory. """
    self._download_to_cache_dir(self.ASN_DESCRIPTION_URL)
def download_asn_assignment_files(self):
    """ Download and cache the latest BGP routing snapshots. """
    for snapshot_url in self.ASN_ASSIGNMENT_URLS.split():
        self._download_to_cache_dir(snapshot_url)
474 def _download_to_cache_dir(self
, url
):
475 """ Fetch a resource (with progress bar) and store contents to the
476 local cache directory under the file name given in the URL. """
477 if not os
.path
.exists(self
.cache_dir
):
479 print("Initializing the cache directory...")
480 os
.mkdir(self
.cache_dir
)
481 filename
= url
.split('/')[-1]
486 req
.add_header('User-Agent', self
.user_agent
)
487 # TODO Allow use of a proxy.
488 #req.set_proxy(host, type)
490 fetcher
= urlopen(req
)
491 except URLError
as err
:
492 msg
= "An error occurred while attempting to cache file from:"
493 print(("%s\n\t%s\n\t%s" % (msg
, url
, str(err
))))
495 length_header
= fetcher
.headers
.get("Content-Length")
498 expected_bytes
= int(length_header
)
499 print(("Fetching %d kilobytes" %
500 round(float(expected_bytes
/ 1024), 2)))
501 download_started
= time
.time()
502 output_file
= open(os
.path
.join(self
.cache_dir
, filename
), "wb")
503 received_bytes
, seconds_elapsed
= 0, 0
505 seconds_elapsed
= time
.time() - download_started
506 if expected_bytes
>= 0:
507 self
._update
_progress
_bar
(received_bytes
, expected_bytes
,
509 chunk
= fetcher
.read(1024)
511 if expected_bytes
>= 0 and received_bytes
!= expected_bytes
:
512 print(("Expected %s bytes, only received %s" %
513 (expected_bytes
, received_bytes
)))
516 received_bytes
+= len(chunk
)
517 output_file
.write(chunk
)
520 def _update_progress_bar(self
, received_bytes
, expected_bytes
,
522 """ Write a progress bar to the console. """
524 rows
= 100 # use some WinCon function for these?
525 columns
= 80 # but not really important.
528 rows
, columns
= list(map(int, os
.popen('stty size', 'r'
531 if seconds_elapsed
== 0:
533 percent_done
= float(received_bytes
) / float(expected_bytes
)
534 caption
= "%.2f K/s" % (received_bytes
/ 1024 / seconds_elapsed
)
535 width
= columns
- 4 - len(caption
)
536 sys
.stdout
.write("[%s>%s] %s%s" % (
537 "=" * int(percent_done
* width
),
538 "." * (width
- int(percent_done
* width
)), caption
, EOL
))
541 def check_rir_file_mtimes(self
):
542 """ Return True if the mtime of any RIR file in our cache directory
543 is > 24 hours, False otherwise. """
544 if not os
.path
.exists(self
.cache_dir
):
546 for rir_url
in self
.RIR_URLS
.split():
547 rir_path
= os
.path
.join(self
.cache_dir
,
548 rir_url
.split('/')[-1])
549 if os
.path
.exists(rir_path
):
550 rir_stat
= os
.stat(rir_path
)
551 if (time
.time() - rir_stat
.st_mtime
) > 86400:
555 def verify_rir_files(self
):
556 """ Compute md5 checksums of all RIR files, compare them to the
557 provided .md5 files, and return True if the two checksums match,
558 or False otherwise. """
559 for rir_url
in self
.RIR_URLS
.split():
560 rir_path
= os
.path
.join(self
.cache_dir
,
561 rir_url
.split('/')[-1])
562 rir_md5_path
= os
.path
.join(self
.cache_dir
,
563 rir_url
.split('/')[-1] + '.md5')
564 if not os
.path
.exists(rir_md5_path
) or \
565 not os
.path
.exists(rir_path
):
567 rir_md5_file
= open(rir_md5_path
, 'r')
568 expected_checksum
= rir_md5_file
.read()
570 if "=" in expected_checksum
:
571 expected_checksum
= expected_checksum
.split("=")[-1].strip()
572 elif expected_checksum
== "":
574 print("No checksum... skipping verification...")
577 regex
= re
.compile("[a-f0-9]{32}")
578 regres
= regex
.findall(expected_checksum
)
580 print("Error: mutiple checksum found")
581 elif len(regres
) < 1:
582 print("Error: no checksum found")
584 expected_checksum
= regres
[0]
585 computed_checksum
= ""
586 rir_file
= open(rir_path
, 'rb')
587 rir_data
= rir_file
.read()
589 computed_checksum
= str(hashlib
.md5(rir_data
).hexdigest())
590 if expected_checksum
!= computed_checksum
:
591 print(("The computed md5 checksum of %s, %s, does *not* "
592 "match the provided checksum %s!" %
593 (rir_path
, computed_checksum
, expected_checksum
)))
595 def parse_maxmind_files(self
, maxmind_urls
=None):
596 """ Parse locally cached MaxMind files and insert assignments to the
597 local database cache, overwriting any existing MaxMind
600 maxmind_urls
= self
.MAXMIND_URLS
.split()
601 self
.database_cache
.delete_assignments('maxmind')
602 for maxmind_url
in maxmind_urls
:
603 maxmind_path
= os
.path
.join(self
.cache_dir
,
604 maxmind_url
.split('/')[-1])
605 if not os
.path
.exists(maxmind_path
):
606 print("Unable to find %s." % maxmind_path
)
608 if maxmind_path
.endswith('.zip'):
609 maxmind_zip_path
= zipfile
.ZipFile(maxmind_path
)
610 for contained_filename
in maxmind_zip_path
.namelist():
611 content
= maxmind_zip_path
.read(contained_filename
)
612 self
._parse
_maxmind
_content
(content
, 'maxmind',
614 elif maxmind_path
.endswith('.gz'):
615 content
= gzip
.open(maxmind_path
).read()
616 self
._parse
_maxmind
_content
(content
, 'maxmind', 'maxmind')
617 self
.database_cache
.commit_changes()
619 def import_maxmind_file(self
, maxmind_path
):
620 self
.database_cache
.delete_assignments(maxmind_path
)
621 if not os
.path
.exists(maxmind_path
):
622 print("Unable to find %s." % maxmind_path
)
624 content
= open(maxmind_path
).read()
625 self
._parse
_maxmind
_content
(content
, maxmind_path
, maxmind_path
)
626 self
.database_cache
.commit_changes()
628 def _parse_maxmind_content(self
, content
, source_type
, source_name
):
629 keys
= ['start_str', 'end_str', 'start_num', 'end_num',
630 'country_code', 'country_name']
631 for line
in content
.decode('utf-8').split('\n'):
632 if len(line
.strip()) == 0 or line
.startswith("#"):
634 line
= line
.replace('"', '').replace(' ', '').strip()
635 parts
= line
.split(',')
636 entry
= dict((k
, v
) for k
, v
in zip(keys
, parts
))
637 start_num
= int(entry
['start_num'])
638 end_num
= int(entry
['end_num'])
639 country_code
= str(entry
['country_code'])
640 start_ipaddr
= ipaddr
.ip_address(entry
['start_str'])
641 if isinstance(start_ipaddr
, ipaddr
.IPv4Address
):
645 self
.database_cache
.insert_assignment(start_num
, end_num
,
646 num_type
, country_code
, source_type
, source_name
)
648 def parse_rir_files(self
, rir_urls
=None):
649 """ Parse locally cached RIR files and insert assignments to the local
650 database cache, overwriting any existing RIR assignments. """
652 rir_urls
= self
.RIR_URLS
.split()
653 self
.database_cache
.delete_assignments('rir')
654 keys
= "registry country_code type start value date status"
655 for rir_url
in rir_urls
:
656 rir_path
= os
.path
.join(self
.cache_dir
,
657 rir_url
.split('/')[-1])
658 if not os
.path
.exists(rir_path
):
659 print("Unable to find %s." % rir_path
)
661 for line
in open(rir_path
, 'r'):
662 if line
.startswith("#"):
664 entry
= dict((k
, v
) for k
, v
in
665 zip(keys
.split(), line
.strip().split("|")))
666 source_name
= str(entry
['registry'])
667 country_code
= str(entry
['country_code'])
668 if source_name
.replace(".", "", 1).isdigit() or country_code
== "*":
670 num_type
= entry
['type']
671 if num_type
== 'asn':
672 start_num
= end_num
= int(entry
['start'])
673 elif num_type
== 'ipv4':
674 start_num
= int(ipaddr
.IPv4Address(entry
['start']))
675 end_num
= start_num
+ int(entry
['value']) - 1
676 elif num_type
== 'ipv6':
677 network_str
= entry
['start'] + '/' + entry
['value']
678 network_ipaddr
= ipaddr
.IPv6Network(network_str
)
679 start_num
= int(network_ipaddr
.network_address
)
680 end_num
= int(network_ipaddr
.broadcast_address
)
681 self
.database_cache
.insert_assignment(start_num
,
682 end_num
, num_type
, country_code
, 'rir',
684 self
.database_cache
.commit_changes()
686 def parse_lir_files(self
, lir_urls
=None):
687 """ Parse locally cached LIR files and insert assignments to the local
688 database cache, overwriting any existing LIR assignments. """
690 lir_urls
= self
.LIR_URLS
.split()
691 self
.database_cache
.delete_assignments('lir')
692 for lir_url
in lir_urls
:
693 lir_path
= os
.path
.join(self
.cache_dir
,
694 lir_url
.split('/')[-1])
695 if not os
.path
.exists(lir_path
):
696 print("Unable to find %s." % lir_path
)
698 if lir_path
.endswith('.gz'):
699 lir_file
= gzip
.open(lir_path
)
701 lir_file
= open(lir_path
)
707 for line
in lir_file
:
708 line
= line
.decode('utf-8', 'ignore').replace("\n", "")
711 start_num
, end_num
, country_code
, num_type
= 0, 0, "", ""
712 elif not entry
and "inetnum:" in line
:
714 line
= line
.replace("inetnum:", "").strip()
715 start_str
= line
.split("-")[0].strip()
716 end_str
= line
.split("-")[1].strip()
717 start_num
= int(ipaddr
.IPv4Address(start_str
))
718 end_num
= int(ipaddr
.IPv4Address(end_str
))
721 except Exception as e
:
724 elif not entry
and "inet6num:" in line
:
726 network_str
= line
.replace("inet6num:", "").strip()
727 network_ipaddr
= ipaddr
.IPv6Network(network_str
)
728 start_num
= int(network_ipaddr
.network_address
)
729 end_num
= int(network_ipaddr
.broadcast_address
)
732 except Exception as e
:
735 elif entry
and "country:" in line
:
736 country_code
= line
.replace("country:", "").strip()
737 self
.database_cache
.insert_assignment(start_num
,
738 end_num
, num_type
, country_code
, 'lir', 'ripencc')
739 self
.database_cache
.commit_changes()
741 def parse_asn_description_file(self
, asn_description_url
=None):
742 """ Parse locally cached ASN to Description mappings and insert
743 mappings to the local database cache, overwriting any existing ASN
744 to Name assignments. """
745 if not asn_description_url
:
746 asn_description_url
= self
.ASN_DESCRIPTION_URL
747 self
.database_cache
.delete_asn_descriptions()
748 asn_description_path
= os
.path
.join(self
.cache_dir
,
749 asn_description_url
.split('/')[-1])
750 asn_descriptions
= open(asn_description_path
)
751 source_name
= 'cidr_report'
752 skiplen
= len('<a href="/cgi-bin/as-report?as=AS')
753 for line
in asn_descriptions
:
755 asn
, _name
= line
[skiplen
:].split('&view=2.0')
756 description
= _name
.split('</a>')[1].strip()
757 self
.database_cache
.insert_asn_description(asn
, source_name
,
761 self
.database_cache
.commit_changes()
762 asn_descriptions
.close()
764 def parse_asn_assignment_files(self
, asn_assignment_urls
=None):
765 if not asn_assignment_urls
:
766 asn_assignment_urls
= self
.ASN_ASSIGNMENT_URLS
.split()
767 self
.database_cache
.delete_asn_assignments()
768 for asn_assignment_url
in asn_assignment_urls
:
769 asn_assignment_path
= os
.path
.join(self
.cache_dir
,
770 asn_assignment_url
.split('/')[-1])
771 if not os
.path
.exists(asn_assignment_path
):
772 print("Unable to find %s." % asn_assignment_path
)
774 if asn_assignment_path
.endswith('.bz2'):
775 b
= bz2
.BZ2File(asn_assignment_path
)
777 if line
.startswith("*"):
779 netblock
, path
= l
[1], l
[6:-1]
780 nexthop
, metric
, locprf
, weight
= l
[2],l
[3],l
[4],l
[5]
782 network
= ipaddr
.IPNetwork(netblock
)
783 #XXX add support for other sources too
784 source_type
= 'bgp_snapshot'
785 source_name
= 'routeviews'
787 if isinstance(network
, ipaddr
.IPv4Network
): num_type
= "ipv4"
788 else: num_type
= "ivp6"
790 self
.database_cache
.insert_asn_assignment(int(network
.network
),
791 int(network
.broadcast
), num_type
, path
[-1],
792 source_type
, source_name
)
795 def __init__(self
, cache_dir
, database_cache
, verbose
=False):
796 self
.cache_dir
= cache_dir
797 self
.database_cache
= database_cache
798 self
.verbose
= verbose
800 self
.build_country_code_dictionary()
802 def build_country_code_dictionary(self
):
803 """ Return a dictionary mapping country name to the country
805 country_code_path
= os
.path
.join(self
.cache_dir
,
806 'country_names_and_code_elements_txt-temp.htm')
807 if not os
.path
.exists(country_code_path
):
810 for line
in open(country_code_path
):
811 if line
== "" or line
.startswith("Country ") or ";" not in line
:
813 country_name
, country_code
= line
.strip().split(";")
814 country_name
= ' '.join([part
.capitalize() for part
in \
815 country_name
.split(" ")])
816 self
.map_co
[country_name
] = country_code
def knows_country_names(self):
    """ Return True when the country name-to-code map has been built. """
    return not (self.map_co is None)
821 def get_name_from_country_code(self
, cc_code
):
822 if not self
.knows_country_names():
824 country_name
= [(key
, value
) for (key
, value
) in \
825 list(self
.map_co
.items()) if value
== cc_code
]
826 if len(country_name
) > 0:
827 return country_name
[0][0]
829 def get_country_code_from_name(self
, country_name
):
830 """ Return the country code for a given country name. """
831 if not self
.knows_country_names():
833 cc_code
= [self
.map_co
[key
] for key
in list(self
.map_co
.keys()) if \
834 key
.upper().startswith(country_name
.upper())]
838 def lookup_ipv6_address(self
, lookup_ipaddr
):
839 print("Reverse lookup for: " + str(lookup_ipaddr
))
840 for source_type
in ['maxmind', 'rir', 'lir']:
841 cc
= self
.database_cache
.fetch_country_code('ipv6',
842 source_type
, int(lookup_ipaddr
))
844 print(source_type
.upper(), "country code:", cc
)
845 cn
= self
.get_name_from_country_code(cc
)
847 print(source_type
.upper(), "country name:", cn
)
849 def lookup_ipv4_address(self
, lookup_ipaddr
):
850 print("Reverse lookup for: " + str(lookup_ipaddr
))
851 maxmind_cc
= self
.database_cache
.fetch_country_code('ipv4', 'maxmind',
854 print('MaxMind country code:', maxmind_cc
)
855 maxmind_cn
= self
.get_name_from_country_code(maxmind_cc
)
857 print('MaxMind country name:', maxmind_cn
)
858 rir_cc
= self
.database_cache
.fetch_country_code('ipv4', 'rir',
861 print('RIR country code:', rir_cc
)
862 rir_cn
= self
.get_name_from_country_code(rir_cc
)
864 print('RIR country name:', rir_cn
)
866 print('Not found in RIR db')
867 lir_cc
= self
.database_cache
.fetch_country_code('ipv4', 'lir',
870 print('LIR country code:', lir_cc
)
871 lir_cn
= self
.get_name_from_country_code(lir_cc
)
873 print('LIR country name:', lir_cn
)
874 if maxmind_cc
and maxmind_cc
!= rir_cc
:
875 print("It appears that the RIR data conflicts with MaxMind's "
876 "data. MaxMind's data is likely closer to being "
877 "correct due to sub-delegation issues with LIR databases.")
879 def lookup_ip_address(self
, lookup_str
):
880 """ Return the country code and name for a given ip address. """
882 lookup_ipaddr
= ipaddr
.ip_address(lookup_str
)
883 if isinstance(lookup_ipaddr
, ipaddr
.IPv4Address
):
884 self
.lookup_ipv4_address(lookup_ipaddr
)
885 elif isinstance(lookup_ipaddr
, ipaddr
.IPv6Address
):
886 self
.lookup_ipv6_address(lookup_ipaddr
)
888 print(("Did not recognize '%s' as either IPv4 or IPv6 "
889 "address." % lookup_str
))
890 except ValueError as e
:
891 print("'%s' is not a valid IP address." % lookup_str
)
893 def asn_lookup(self
, asn
):
894 asn_cc
= self
.database_cache
.fetch_country_code('asn', 'rir', asn
)
896 print("AS country code: %s" % asn_cc
)
897 asn_cn
= self
.get_name_from_country_code(asn_cc
)
899 print("AS country name: %s" % asn_cn
)
901 print("AS%s not found!" % asn
)
903 def fetch_rir_blocks_by_country(self
, request
, country
):
905 for (start_num
, end_num
) in \
906 self
.database_cache
.fetch_assignments(request
, country
):
907 if request
== "ipv4" or request
== "ipv6":
908 start_ipaddr
= ipaddr
.ip_address(start_num
)
909 end_ipaddr
= ipaddr
.ip_address(end_num
)
910 result
+= [str(x
) for x
in
911 ipaddr
.summarize_address_range(
912 start_ipaddr
, end_ipaddr
)]
914 result
.append(str(start_num
))
def lookup_countries_in_different_source(self, first_country_code):
    """ Look up all assignments matching the given country code, then
    look up to which country code(s) the same number ranges are
    assigned in other source types. Print out the result showing
    similarities and differences. """
    # NOTE(review): reassembled from a corrupted source dump; the legend
    # header and the marker-assignment lines were restored from context --
    # verify against project history.
    print(("\nLegend:\n"
           " '<' = found assignment range with country code '%s'\n"
           " '>' = overlapping assignment range with same country code\n"
           " '*' = overlapping assignment range, first conflict\n"
           " '#' = overlapping assignment range, second conflict and "
           "beyond\n ' ' = neighboring assignment range") % (
               first_country_code, ))
    results = self.database_cache.fetch_country_blocks_in_other_sources(
        first_country_code)
    prev_first_source_type = ''
    prev_first_start_num = -1
    cur_second_country_codes = []
    for (first_source_type, first_start_num, first_end_num,
            second_source_type, second_start_num, second_end_num,
            second_country_code, num_type) in results:
        # New source type: print a section header once.
        if first_source_type != prev_first_source_type:
            print("\nAssignments in '%s':" % (first_source_type, ))
            prev_first_source_type = first_source_type
        # New assignment range: reset the per-range conflict tracking.
        if first_start_num != prev_first_start_num:
            cur_second_country_codes = []
            print("")
            prev_first_start_num = first_start_num
        marker = ' '
        # Only overlapping ranges get a non-blank marker.
        if second_end_num >= first_start_num and \
                second_start_num <= first_end_num:
            if first_country_code != second_country_code and \
                    second_country_code not in cur_second_country_codes:
                cur_second_country_codes.append(second_country_code)
            if first_source_type == second_source_type:
                marker = '<'
            elif len(cur_second_country_codes) == 0:
                marker = '>'
            elif len(cur_second_country_codes) == 1:
                marker = '*'
            else:
                marker = '#'
        # Render the second range as an IP, an IP range, an AS range,
        # or a single AS number, depending on number type and size.
        if num_type.startswith("ip") and \
                second_start_num == second_end_num:
            second_range = "%s" % (ipaddr.ip_address(second_start_num), )
        elif num_type.startswith("ip") and \
                second_start_num < second_end_num:
            second_range = "%s-%s" % (ipaddr.ip_address(second_start_num),
                                      ipaddr.ip_address(second_end_num))
        elif second_start_num < second_end_num:
            second_range = "AS%d-%d" % (second_start_num, second_end_num)
        else:
            second_range = "AS%d" % (second_start_num, )
        print("%1s %s %s %s" % (marker, second_country_code, second_range,
                                second_source_type, ))
def _get_network_string_from_range(self, end, start, bits=32):
    """ Convert a hexadecimal (start, end) address pair into a network
    object whose prefix length is estimated from the range size.

    NOTE: the argument order really is (end, start) -- callers pass
    r[3], r[2].  Raises a math domain error if end == start (log(0));
    preserved from the original behavior.
    """
    start, end = int(start, 16), int(end, 16)
    netbits = bits - int(log(end - start, 2))
    network_str = "%s/%d" % (ipaddr.ip_address(start), netbits)
    # Fixed py3 crash: the stdlib "ipaddress" module (used when running
    # under Python 3, see the imports at the top of the file) has no
    # IPAddress/IPNetwork attributes, so the original AttributeError'd.
    # The embedded py2 "ipaddr" module still provides IPNetwork.
    if hasattr(ipaddr, "IPNetwork"):
        return ipaddr.IPNetwork(network_str)
    # strict=False tolerates host bits left over from the rounded
    # prefix-length estimate.
    return ipaddr.ip_network(network_str, strict=False)
def lookup_org_by_ip(self, lookup_str):
    """ Print the announced network, ASN and AS description for every
    announcement covering the given IP address string.

    Prints a diagnostic for invalid addresses (ValueError) or when no
    matching announcement is found (TypeError from the cache layer).
    """
    try:
        # Fixed py3 crash: use the ip_address alias, which exists on
        # both the py2 embedded ipaddr module (aliased at import time)
        # and the py3 stdlib ipaddress module; IPAddress is py2-only.
        lookup_ipaddr = ipaddr.ip_address(lookup_str)
        if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
            num_type = 'ipv4'
            len_bits = 32
        elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
            num_type = 'ipv6'
            len_bits = 128
        else:
            raise ValueError
        rs = self.database_cache.fetch_org_by_ip_address(
            lookup_ipaddr, num_type)
        for r in rs:
            # r: (asn, description, start_hex, end_hex) -- presumably;
            # TODO confirm against fetch_org_by_ip_address.
            network = self._get_network_string_from_range(
                r[3], r[2], bits=len_bits)
            print("%s in %s announced by AS%s - %s" % (lookup_str, network,
                                                       r[0], r[1]))
    except ValueError:
        print("'%s' is not a valid IP address." % lookup_str)
    except TypeError:
        print("Did not find any matching announcements containing %s." %
              lookup_str)
def lookup_org_by_range(self, start_range, end_range):
    """ Print the announced networks, ASNs and AS descriptions for all
    announcements overlapping the given address range.

    Both endpoints must belong to the same address family; order does
    not matter (min/max is applied).  Prints a diagnostic for invalid
    ranges (ValueError) or when nothing matches (TypeError from the
    cache layer).
    """
    output_str = "%s announced by AS%s - %s"
    try:
        # Fixed py3 crash: ip_address exists under both the py2
        # embedded ipaddr module (aliased at import time) and the py3
        # stdlib ipaddress module; IPAddress is py2-only.
        a = ipaddr.ip_address(start_range)
        b = ipaddr.ip_address(end_range)
        if isinstance(a, ipaddr.IPv4Address) and \
                isinstance(b, ipaddr.IPv4Address):
            num_type = 'ipv4'
            len_bits = 32
        elif isinstance(a, ipaddr.IPv6Address) and \
                isinstance(b, ipaddr.IPv6Address):
            num_type = 'ipv6'
            len_bits = 128
        else:
            # Mixed IPv4/IPv6 endpoints are not a valid range.
            raise ValueError
        rs = self.database_cache.fetch_org_by_ip_range(
            min(a, b), max(a, b), num_type)
        for r in rs:
            network = self._get_network_string_from_range(
                r[3], r[2], bits=len_bits)
            print(output_str % (network, r[0], r[1]))
    except ValueError:
        print("%s %s is not a valid IP range." % (start_range, end_range))
    except TypeError:
        print("Did not find any matching announcements in range %s %s." %
              (start_range, end_range))
def split_callback(option, opt, value, parser):
    """optparse callback for "VALUE[:type]" arguments: stores the part
    before the first ':' under the option's dest, and, when a non-empty
    type suffix is present, stores it as 'type_filter'."""
    parts = value.split(':')
    setattr(parser.values, option.dest, parts[0])
    if len(parts) > 1 and parts[1]:
        setattr(parser.values, 'type_filter', parts[1])
def main():
    """ Where the magic starts. """
    # NOTE(review): this function was reassembled from a corrupted source
    # dump; all option strings visible in the dump are reproduced verbatim,
    # and the missing control-flow lines were restored from context --
    # verify against project history.
    usage = ("Usage: %prog [options]\n\n"
             "Example: %prog -v -t mm")
    parser = optparse.OptionParser(usage)

    # Global options valid in every mode.
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", help="be verbose", default=False)
    parser.add_option("-c", "--cache-dir", action="store", dest="dir",
                      help="set cache directory [default: %default]",
                      default=str(os.path.expanduser('~')) + "/.blockfinder/")
    parser.add_option("--user-agent", action="store", dest="ua",
                      help=('provide a User-Agent which will be used when '
                            'fetching delegation files [default: "%default"]'),
                      default="Mozilla/5.0 (Windows NT 6.1; rv:17.0) "
                              "Gecko/20100101 Firefox/17.0")
    parser.add_option("-x", "--hack-the-internet", action="store_true",
                      dest="hack_the_internet", help=optparse.SUPPRESS_HELP)

    # Cache modes: initialize or update the local database cache.
    group = optparse.OptionGroup(
        parser, "Cache modes",
        "Pick at most one of these modes to initialize or update "
        "the local cache. May not be combined with lookup modes.")
    group.add_option("-m", "--init-maxmind", action="store_true",
                     dest="init_maxmind",
                     help="initialize or update MaxMind GeoIP database")
    group.add_option("-g", "--reload-maxmind", action="store_true",
                     dest="reload_maxmind",
                     help=("update cache from existing MaxMind GeoIP "
                           "database"))
    group.add_option("-r", "--import-maxmind", action="store",
                     dest="import_maxmind", metavar="FILE",
                     help=("import the specified MaxMind GeoIP database file "
                           "into the database cache using its file name as "
                           "source name"))
    group.add_option("-i", "--init-rir",
                     action="store_true", dest="init_del",
                     help="initialize or update delegation information")
    group.add_option("-d", "--reload-rir", action="store_true",
                     dest="reload_del",
                     help="use existing delegation files to update the "
                          "database")
    group.add_option("-l", "--init-lir", action="store_true",
                     dest="init_lir",
                     help=("initialize or update lir information; can take "
                           "up to 5 minutes"))
    group.add_option("-z", "--reload-lir", action="store_true",
                     dest="reload_lir",
                     help=("use existing lir files to update the database; "
                           "can take up to 5 minutes"))
    group.add_option("-o", "--download-cc", action="store_true",
                     dest="download_cc", help="download country codes file")
    group.add_option("-e", "--erase-cache", action="store_true",
                     dest="erase_cache", help="erase the local database cache")
    group.add_option("-j", "--init-asn-descriptions", action="store_true",
                     dest="init_asn_descriptions",
                     help=("initialize or update asn description "
                           "information"))
    group.add_option("-k", "--reload-asn-descriptions", action="store_true",
                     dest="reload_asn_descriptions",
                     help=("Use existing asn descriptions to update "
                           "database"))
    group.add_option("-y", "--init-asn-assignments", action="store_true",
                     dest="init_asn_assignments",
                     help=("initialize or update asn assignment information"))
    group.add_option("-u", "--reload-asn-assignments", action="store_true",
                     dest="reload_asn_assignments",
                     help=("Use existing asn assignments to update database"))
    parser.add_option_group(group)

    # Lookup modes: query the local cache.
    group = optparse.OptionGroup(
        parser, "Lookup modes",
        "Pick at most one of these modes to look up data in the "
        "local cache. May not be combined with cache modes.")
    group.add_option("-4", "--ipv4", action="store", dest="ipv4",
                     help=("look up country code and name for the specified "
                           "IPv4 address"))
    group.add_option("-6", "--ipv6", action="store", dest="ipv6",
                     help=("look up country code and name for the specified "
                           "IPv6 address"))
    group.add_option("-a", "--asn", action="store", dest="asn",
                     help="look up country code and name for the specified "
                          "ASN")
    group.add_option("-t", "--code", action="callback", dest="cc",
                     callback=split_callback, metavar="CC[:type]", type="str",
                     help=("look up all allocations (or only those for number "
                           "type 'ipv4', 'ipv6', or 'asn' if provided) in the "
                           "delegation cache for the specified two-letter "
                           "country code"))
    group.add_option("-n", "--name", action="callback", dest="cn",
                     callback=split_callback, metavar="CN[:type]", type="str",
                     help=("look up all allocations (or only those for number "
                           "type 'ipv4', 'ipv6', or 'asn' if provided) in the "
                           "delegation cache for the specified full country "
                           "name"))
    group.add_option("-p", "--compare", action="store", dest="compare",
                     metavar="CC",
                     help=("compare assignments to the specified country code "
                           "with overlapping assignments in other data "
                           "sources; can take some time and produce some "
                           "long output"))
    group.add_option("-w", "--what-country", action="store", dest="what_cc",
                     help=("look up country name for specified country code"))
    group.add_option("--lookup-org-by-ip", "--lookup-org-by-ip",
                     action="store", dest="lookup_org_by_ip",
                     help=("look up ASN and AS Description for an IP "
                           "address"))
    group.add_option("--lookup-org-by-range", "--lookup-org-by-range",
                     action="store_true", dest="lookup_org_by_range",
                     help=("look up announced networks in a range of "
                           "addresses; requires --range-start and "
                           "--range-end to be set"))
    group.add_option("--range-start", "--range-start",
                     action="store", dest="range_start",
                     help=("Specify the start of a range of addresses"))
    group.add_option("--range-end", "--range-end",
                     action="store", dest="range_end",
                     help=("Specify the end of a range of addresses"))
    parser.add_option_group(group)

    # Export modes: dump the cache to GeoIP-style CSV files.
    group = optparse.OptionGroup(parser, "Export modes")
    group.add_option("--export-geoip", "--export-geoip", action="store_true",
                     dest="export",
                     help=("export the lookup database to "
                           "GeoIPCountryWhois.csv and v6.csv files in the "
                           "format used to build the debian package "
                           "geoip-database"))
    group.add_option("--geoip-v4-file", "--geoip-v4-file", action="store",
                     dest="geoip_v4_filename",
                     help=("The filename to write the IPv4 GeoIP dataset to"))
    group.add_option("--geoip-v6-file", "--geoip-v6-file", action="store",
                     dest="geoip_v6_filename",
                     help=("The filename to write the IPv6 GeoIP dataset to"))
    group.add_option("--geoip-asn-file", "--geoip-asn-file", action="store",
                     dest="geoip_asn_filename",
                     help=("The filename to write the IPv4 GeoIP ASNum "
                           "dataset to"))
    parser.add_option_group(group)

    # NOTE(review): this "Network modes" group is created but never
    # populated or attached to the parser; kept for fidelity.
    group = optparse.OptionGroup(parser, "Network modes")

    (options, args) = parser.parse_args()
    if options.hack_the_internet:
        print("all your bases are belong to us!")
        sys.exit(0)

    # Enforce exactly one cache/lookup/export mode.
    options_dict = vars(options)
    modes = 0
    for mode in ["init_maxmind", "reload_maxmind", "import_maxmind",
                 "init_del", "init_lir", "reload_del", "reload_lir",
                 "download_cc", "erase_cache", "ipv4", "ipv6", "asn",
                 "cc", "cn", "compare", "what_cc", "init_asn_descriptions",
                 "reload_asn_descriptions", "init_asn_assignments",
                 "reload_asn_assignments", "lookup_org_by_ip",
                 "lookup_org_by_range", "export"]:
        if mode in options_dict and options_dict.get(mode):
            modes += 1
    if modes > 1:
        parser.error("only 1 cache or lookup mode allowed")
    elif modes == 0:
        parser.error("must provide 1 cache or lookup mode")

    database_cache = DatabaseCache(options.dir, options.verbose)
    if options.erase_cache:
        database_cache.erase_database()
        sys.exit(0)
    if not database_cache.connect_to_database():
        print("Could not connect to database.")
        print("You may need to erase it using -e and then reload it "
              "using -d/-z. Exiting.")
        sys.exit(1)
    database_cache.set_db_version()
    downloader_parser = DownloaderParser(options.dir, database_cache,
                                         options.ua)
    lookup = Lookup(options.dir, database_cache)

    # Warn about stale RIR data before any RIR-backed lookup.
    if options.ipv4 or options.ipv6 or options.asn or options.cc \
            or options.cn or options.compare:
        if downloader_parser.check_rir_file_mtimes():
            print("Your cached RIR files are older than 24 hours; you "
                  "probably want to update them.")

    if options.asn:
        lookup.asn_lookup(options.asn)
    elif options.lookup_org_by_ip:
        lookup.lookup_org_by_ip(options.lookup_org_by_ip)
    elif options.lookup_org_by_range:
        if not (options.range_start and options.range_end):
            print("You must specify the start and end addresses; "
                  "see --range-start and --range-end")
        else:
            lookup.lookup_org_by_range(options.range_start,
                                       options.range_end)
    elif options.ipv4:
        lookup.lookup_ip_address(options.ipv4)
    elif options.ipv6:
        lookup.lookup_ip_address(options.ipv6)
    elif options.cc or options.cn or options.what_cc:
        country = None
        if options.cc:
            country = options.cc.upper()
        elif not lookup.knows_country_names():
            print("Need to download country codes first before looking "
                  "up countries by name.")
        elif options.what_cc:
            country = options.what_cc.upper()
            country_name = lookup.get_name_from_country_code(country)
            if country_name:
                print(("Hmm...%s? That would be %s."
                       % (options.what_cc, country_name)))
                sys.exit(0)
            else:
                print(("Hmm, %s? We're not sure either. Are you sure that's "
                       "a country code?" % options.what_cc))
                sys.exit(1)
        else:
            country = lookup.get_country_code_from_name(options.cn)
            if not country:
                print("It appears your search did not match a country.")
        if country:
            types = ["ipv4", "ipv6", "asn"]
            if hasattr(options, 'type_filter') and \
                    options.type_filter.lower() in types:
                types = [options.type_filter.lower()]
            for request in types:
                print("\n".join(lookup.fetch_rir_blocks_by_country(
                    request, country)))
    elif options.compare:
        print("Comparing assignments with overlapping assignments in other "
              "data sources...")
        lookup.lookup_countries_in_different_source(options.compare)
    elif options.init_maxmind or options.reload_maxmind:
        if options.init_maxmind:
            print("Downloading Maxmind GeoIP files...")
            downloader_parser.download_maxmind_files()
        print("Importing Maxmind GeoIP files...")
        downloader_parser.parse_maxmind_files()
    elif options.import_maxmind:
        print("Importing Maxmind GeoIP files...")
        downloader_parser.import_maxmind_file(options.import_maxmind)
    elif options.init_del or options.reload_del:
        if options.init_del:
            print("Downloading RIR files...")
            downloader_parser.download_rir_files()
            print("Verifying RIR files...")
            downloader_parser.verify_rir_files()
        print("Importing RIR files...")
        downloader_parser.parse_rir_files()
    elif options.init_lir or options.reload_lir:
        if options.init_lir:
            print("Downloading LIR delegation files...")
            downloader_parser.download_lir_files()
        print("Importing LIR files...")
        downloader_parser.parse_lir_files()
    elif options.download_cc:
        print("Downloading country code file...")
        downloader_parser.download_country_code_file()
    elif options.init_asn_descriptions or options.reload_asn_descriptions:
        if options.init_asn_descriptions:
            print("Downloading ASN Descriptions...")
            downloader_parser.download_asn_description_file()
        print("Importing ASN Descriptions...")
        downloader_parser.parse_asn_description_file()
    elif options.init_asn_assignments or options.reload_asn_assignments:
        if options.init_asn_assignments:
            print("Downloading ASN Assignments...")
            downloader_parser.download_asn_assignment_files()
        print("Importing ASN Assignments...")
        downloader_parser.parse_asn_assignment_files()
    elif options.export:
        v4_file = options.geoip_v4_filename or "GeoIPCountryWhois.csv"
        v6_file = options.geoip_v6_filename or "v6.csv"
        asn_file = options.geoip_asn_filename or "GeoIPASNum.csv"
        print("Exporting GeoIP IPv4 to %s" % v4_file)
        database_cache.export_geoip(lookup, v4_file, 'ipv4')
        print("Exporting GeoIP IPv6 to %s" % v6_file)
        database_cache.export_geoip(lookup, v6_file, 'ipv6')
        print("Exporting GeoIP IPv4 ASNum to %s" % asn_file)
        database_cache.export_asn(asn_file, 'ipv4')
        # IPv6 ASN export not yet supported:
        # print("Exporting GeoIP IPv6 ASNum to %s" % asn_file)
        # database_cache.export_geoip(asn_file, 'ipv6')
    database_cache.commit_and_close_database()
1285 if __name__
== "__main__":