Merge pull request #44 from kloesing/ipaddr
[blockfinder.git] / blockfinder
blobd043d6a8212ad4fc23e1894582cdf6e54cb1465f
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
4 # For the people of Smubworld!
5 import urllib2
6 import os
7 import time
8 import optparse
9 import sys
10 import sqlite3
11 import hashlib
12 import gzip
13 import ipaddr
14 __program__ = 'blockfinder'
15 __url__ = 'https://github.com/ioerror/blockfinder/'
16 __author__ = 'Jacob Appelbaum <jacob@appelbaum.net>, David <db@d1b.org>'
17 __copyright__ = 'Copyright (c) 2010'
18 __license__ = 'See LICENSE for licensing information'
19 __version__ = '3.1415'
21 try:
22 import GeoIP
23 except ImportError:
24 GeoIP = None
26 try:
27 from future import antigravity
28 except ImportError:
29 antigravity = None
31 class DatabaseCache:
32 def __init__(self, cache_dir, verbose=False):
33 self.cache_dir = cache_dir
34 self.verbose = verbose
35 self.cursor = None
36 self.conn = None
38 def connect_to_database(self):
39 if not os.path.exists(self.cache_dir):
40 if self.verbose:
41 print "Initializing the cache directory..."
42 os.mkdir(self.cache_dir)
43 self.conn = sqlite3.connect(self.cache_dir + "sqlitedb")
44 self.cursor = self.conn.cursor()
46 def commit_and_close_database(self):
47 self.conn.commit()
48 self.cursor.close()
50 def create_sql_database(self):
51 """ Creates a new sqlite database.
52 Existing delegation entries are dropped prior to inserting
53 'newer' delegations. """
54 sql = ('DROP TABLE IF EXISTS delegations; '
55 'CREATE TABLE delegations(registry TEXT, cc TEXT, '
56 'start TEXT, value INTEGER, date TEXT, status TEXT, '
57 'type TEXT); '
58 'CREATE TABLE IF NOT EXISTS lir_record(cc TEXT, '
59 'start TEXT, value INTEGER, type INTEGER)')
60 self.cursor.executescript(sql)
61 self.conn.commit()
63 def insert_into_sql_database(self, rows):
64 """ Inserts delegation information into the sqlite database. """
65 sql = ('INSERT INTO delegations (registry, cc, start, value, '
66 'date, status, type) VALUES (?, ?, ?, ?, ?, ?, ?)')
67 self.cursor.executemany(sql, rows)
68 self.conn.commit()
70 def _get_total_delegations_from_db(self):
71 """ Returns the total count of the number of entries in the ipv4,
72 ipv6 and asn table. """
73 sql = 'SELECT COUNT(*) FROM delegations'
74 self.cursor.execute(sql)
75 return int(self.cursor.fetchone()[0])
77 def _get_possible_match_entries(self, cc):
78 """ Get the count of 'possible' matching delegation entries. """
79 sql = 'SELECT COUNT(*) FROM delegations WHERE cc = ?'
80 self.cursor.execute(sql, cc)
81 return int(self.cursor.fetchone()[0])
83 def use_sql_database(self, request, cc):
84 """ Use the sqlite database that is created after fetching
85 delegations to output information for a given request. """
86 if self.verbose:
87 print "We have %d entries in our delegation cache." % \
88 self._get_total_delegations_from_db()
89 sql = ('SELECT start, value FROM delegations WHERE type = ? '
90 'AND cc = ?')
91 cc = (cc,)
92 self.cursor.execute(sql, (request, cc[0]))
93 result = []
94 for row in self.cursor:
95 if request == "ipv4":
96 start_ipaddr = ipaddr.IPv4Address(str(row[0]))
97 end_ipaddr = start_ipaddr + int(row[1]) - 1
98 result += [str(x) for x in \
99 ipaddr.summarize_address_range( \
100 start_ipaddr, end_ipaddr)]
101 elif request == "ipv6":
102 result.append(str(row[0]) + "/" + str(int(row[1])))
103 else:
104 result.append(str(int(row[0])))
105 result.sort()
106 if self.verbose:
107 result.append("We found %d possible entries in our "
108 "delegation cache." % \
109 self._get_possible_match_entries(cc))
110 sql = ('SELECT COUNT(*) FROM delegations WHERE cc = ? '
111 'AND type = ?')
112 self.cursor.execute(sql, (cc[0], request))
113 result.append("We found %d matching entries in our "
114 "delegation cache." % int(self.cursor.fetchone()[0]))
115 return result
117 def _rir_or_lir_lookup_ipv4(self, ip_addr, lookup_type):
118 ipv4arr = ip_addr.split('.')
119 if lookup_type == 'rir':
120 sql = ('SELECT cc, start, value FROM delegations '
121 'WHERE type = "ipv4" AND start LIKE ?')
122 self.cursor.execute(sql,
123 (ipv4arr[0] + "." + ipv4arr[1] + ".%",))
124 else:
125 sql = ('SELECT cc, start, value FROM lir_record '
126 'WHERE start LIKE ? AND type = 4')
127 self.cursor.execute(sql,
128 (ipv4arr[0] + "." + ipv4arr[1] + ".%",))
129 row = self.cursor.fetchone()
130 if row is None:
131 if lookup_type == "rir":
132 sql = ('SELECT cc, start, value FROM delegations '
133 'WHERE type = "ipv4" AND start LIKE ?')
134 self.cursor.execute(sql, (ipv4arr[0] + ".%",))
135 else:
136 sql = ('SELECT cc, start, value FROM lir_record '
137 'WHERE start LIKE ? AND type = 4')
138 self.cursor.execute(sql, (ipv4arr[0] + ".%",))
139 row = self.cursor.fetchone()
140 lookup_ipaddr = ipaddr.IPv4Address(ip_addr)
141 while row is not None:
142 start_ipaddr = ipaddr.IPv4Address(str(row[1]))
143 end_ipaddr = start_ipaddr + int(row[2]) - 1
144 if start_ipaddr <= lookup_ipaddr and \
145 lookup_ipaddr <= end_ipaddr:
146 return row[0]
147 row = self.cursor.fetchone()
149 def rir_lookup(self, ip_addr):
150 return self._rir_or_lir_lookup_ipv4(ip_addr, "rir")
152 def lir_lookup(self, ip_addr):
153 return self._rir_or_lir_lookup_ipv4(ip_addr, "lir")
155 def asn_lookup(self, asn):
156 sql = ('SELECT cc FROM delegations WHERE type = "asn" AND '
157 'start LIKE ?')
158 self.cursor.execute(sql, (asn,))
159 row = self.cursor.fetchone()
160 if row:
161 return row[0]
163 def rir_or_lir_lookup_ipv6(self, ip_addr, ip_query, type_q):
164 if type_q == "RIR":
165 sql = ('SELECT cc, start, value FROM delegations '
166 'WHERE type = "ipv6" AND start like ?')
167 self.cursor.execute(sql, (ip_query,))
168 else:
169 sql = ('SELECT cc, start, value FROM lir_record '
170 'WHERE type = 6 AND start LIKE ?')
171 self.cursor.execute(sql, (ip_query,))
172 lookup_ipaddr = ipaddr.IPv6Address(ip_addr)
173 for row in self.cursor:
174 network = ipaddr.IPv6Network(row[1] + "/" + str(row[2]))
175 if lookup_ipaddr in network:
176 return row[0]
178 def create_or_replace_lir_table_in_db(self):
179 sql = 'DROP TABLE IF EXISTS lir_record'
180 self.cursor.execute(sql)
181 sql = ('CREATE TABLE IF NOT EXISTS lir_record(cc TEXT, '
182 'start TEXT, value INTEGER, type INTEGER)')
183 self.cursor.execute(sql)
184 self.conn.commit()
186 def insert_lir_delegation(self, data):
187 sql = ('INSERT INTO lir_record (cc, start, value, type) '
188 'VALUES (?, ?, ?, ?)')
189 self.cursor.execute(sql, data)
190 self.conn.commit()
192 class DownloaderParser:
193 def __init__(self, cache_dir, database_cache, user_agent, \
194 verbose=False):
195 self.cache_dir = cache_dir
196 self.database_cache = database_cache
197 self.user_agent = user_agent
198 self.verbose = verbose
200 def update_progress_bar(self, percent_done, caption=""):
201 """Write a progress bar to the console"""
202 rows, columns = map(int, \
203 os.popen('stty size', 'r').read().split())
204 width = columns - 4 - len(caption)
205 sys.stdout.write("[%s>%s] %s\x1b[G" % (
206 "=" * int(percent_done*width),
207 "." * (width - int(percent_done * width)), caption))
208 sys.stdout.flush()
210 # XXX TODO:allow the use of a proxy
211 # Set up a proper Request object, set the user agent and if desired,
212 # a proxy
213 def fetch(self, url):
214 """ Fetch (with progress meter) and return the contents of a
215 url. """
216 req = urllib2.Request(url)
217 req.add_header('User-Agent', self.user_agent)
218 #req.set_proxy(host, type)
219 fetcher = urllib2.urlopen(req)
220 length_header = fetcher.headers.get("Content-Length")
221 if length_header == None:
222 """ The server did not provide a Content-Length header. """
223 length_header = -1
224 length = int(length_header)
225 print "Fetching ", str(round(float(length/1024),2)), " kilobytes"
226 ret = ""
227 t_start = time.time()
228 while True:
229 t_delta = time.time() - t_start
230 if t_delta == 0:
231 t_delta = 1
232 if length_header != -1:
233 self.update_progress_bar(float(len(ret)) / length,
234 "%.2f K/s" % (len(ret) / 1024 / t_delta))
235 tmp = fetcher.read(1024)
236 if len(tmp) == 0:
237 if len(ret) != length and length_header != -1:
238 raise Exception("Expected %s bytes, only received " \
239 "%s" % (len(ret), length))
240 print ""
241 return ret
242 ret += tmp
244 def write_to_a_text_file(self, file_loc, data):
245 f = open(file_loc, 'w')
246 f.write(data)
247 f.close()
249 def extract_data_from_gzip_file(self, gzip_file_loc, \
250 extract_file_loc):
251 gzip_file = gzip.open(gzip_file_loc, 'rb')
252 gunzipped_file = open(extract_file_loc, 'w')
253 while True:
254 gunzipped_data = gzip_file.read(1024)
255 if gunzipped_data == "":
256 break
257 gunzipped_file.writelines(gunzipped_data)
258 gzip_file.close()
259 gunzipped_file.close()
261 def read_data_from_binary_file(self, fname):
262 f = open(fname, 'rb')
263 data = f.read()
264 f.close()
265 return data
267 def create_blockfinder_cache_dir(self):
268 if not os.path.exists(self.cache_dir):
269 if self.verbose:
270 print "Initializing the cache directory..."
271 os.mkdir(self.cache_dir)
273 def cache_delegation(self, delegation_url):
274 """ Attempt to cache the contents of a delegation url in our
275 cache dir. """
276 delegation = ""
277 print "Fetching " + delegation_url
278 delegation = self.fetch(delegation_url)
279 tmp = delegation_url.split('/')
280 delegation_file = str(self.cache_dir) + str(tmp[-1])
281 try:
282 self.write_to_a_text_file(delegation_file, delegation)
283 return True
284 except Exception, e:
285 print repr(e)
286 return False
288 def cache_is_dated(self, cached_files):
289 """ Returns True if the mtime of any files in cache dir is
290 > 24 hours. """
291 try:
292 os.stat(self.cache_dir)
293 except OSError, e:
294 print "\nDid you initialize the cache directory?\n"
295 raise e
296 for file in cached_files:
297 fstat = os.stat(self.cache_dir + file)
298 if (time.time() - fstat.st_mtime) > 86400:
299 return True
300 return False
302 def get_md5_from_delegation_md5_file(self, delegation_file):
303 """ Returns the md5sum from the delegation md5 file
304 if it doesn't exist it returns an empty string"""
305 checksum = ""
306 try:
307 f = open(self.cache_dir + delegation_file + ".md5", "r")
308 checksum = f.read()
309 f.close()
310 if "=" in checksum:
311 pos = checksum.find("=") +2
312 checksum = str(checksum[pos:-1])
313 except Exception, e:
314 print repr(e)
315 return checksum
317 def verify_delegation_file(self, delegation_file):
318 """ Compares the delegation file md5sum to that of the provided
319 md5sum, returns True if they match otherwise returns
320 False. """
321 checksum = ""
322 checksum_of_file = ""
323 try:
324 data = self.read_data_from_binary_file(self.cache_dir + \
325 delegation_file)
326 checksum_of_file = str(hashlib.md5(data).hexdigest())
327 except Exception, e:
328 print repr(e)
329 checksum = self.get_md5_from_delegation_md5_file(delegation_file)
330 if checksum != checksum_of_file:
331 return False
332 if checksum == checksum_of_file and checksum != "":
333 return True
334 return False
336 def verify_cache(self, delegation_files):
337 """ If in verbose mode prints the result of checking the checksum
338 of the delegation files. """
339 for file in delegation_files:
340 if self.verbose:
341 print "verifying " + file
342 if self.verify_delegation_file(file):
343 if self.verbose:
344 print "the md5 checksum of " + file + \
345 " *matches* the provided checksum"
346 else:
347 if self.verbose:
348 print "the md5 checksum of " + file + \
349 " does *not* match the provided checksum"
351 def update_delegation_cache(self, delegation_urls):
352 """ Fetch multiple delegation urls and cache the contents. """
353 print "Updating delegation cache..."
354 for url in delegation_urls.split():
355 self.cache_delegation(url + ".md5")
356 if self.verify_delegation_file(url.rpartition('/')[-1]):
357 pass
358 else:
359 self.cache_delegation(url)
361 def update_lir_delegation_cache(self, delegation_urls):
362 """ Fetch multiple LIR delegation urls and cache the contents. """
363 print "Updating LIR delegation cache..."
364 for url in delegation_urls.split():
365 self.cache_delegation(url)
366 self.unpack_a_delegation_cache(delegation_urls, "LIR")
368 def unpack_a_delegation_cache(self, delegation_urls, del_type=""):
369 """ Unpack the fetched LIR delegation files into the blockfinder
370 cache. """
371 # This probably should unlink the gzip'ed file if we care about
372 # space...
373 for url in delegation_urls.split():
374 gzip_filename = url.rpartition('/')[-1]
375 gunziped_filename = gzip_filename.rpartition('.')[0]
376 if self.verbose:
377 print "Unpacking " + del_type + "file " + \
378 gzip_filename + " into our cache as " + \
379 gunziped_filename
380 self.extract_data_from_gzip_file(self.cache_dir + \
381 gzip_filename, self.cache_dir + gunziped_filename)
383 def update_geoip_cache(self, geoip_urls):
384 """ Fetch country level resolution GeoIP files from a given url
385 and cache the contents. Unpack it if it's compressed. """
386 print "Updating GeoIP cache..."
387 for url in geoip_urls.split():
388 self.cache_delegation(url)
389 self.unpack_a_delegation_cache(geoip_urls, "GeoIP")
391 def load_delegation(self, delegation_file):
392 """ Load, parse and store the delegation file contents as a
393 list. """
394 keys = "registry cc type start value date status"
395 try:
396 f = open(delegation_file, "r")
397 delegations = [dict((k,v) for k,v in zip(keys.split(), \
398 line.strip().split("|"))) \
399 for line in f.readlines() if not line.startswith("#")]
400 f.close()
401 return delegations
402 except OSError, e:
403 print repr(e)
405 def load_all_delegations(self, delegation_urls):
406 """ Load all delegations into memory. """
407 delegations = []
408 for url in delegation_urls.split():
409 filename = url.rpartition('/')[-1]
410 if self.verbose:
411 print "Attempting to load delegation file into " \
412 + "memory: " + filename
413 delegations.append(self.load_delegation(self.cache_dir + \
414 filename))
415 return delegations
417 def download_country_code_file(self):
418 """ Download and save the latest opencountrycode
419 TXT(';'-separated) file """
420 url = "http://www.iso.org/iso/list-en1-semic-3.txt"
421 print "Fetching " + url
422 text_content = self.fetch(url)
423 self.write_to_a_text_file(self.cache_dir + "countrycodes.txt", \
424 text_content)
426 def extract_info_from_lir_file_and_insert_into_sqlite(self, filename):
427 block = []
428 country = ""
429 entry = False
430 version = ""
431 for line in open(self.cache_dir + filename, "r"):
432 line = line.replace("\n", "")
433 if line == "":
434 entry = False
435 country, block, version = "", [], ""
436 elif not entry and "inetnum:" in line:
437 try:
438 line = line.replace("inetnum:", "").strip()
439 start_addr = line.split("-")[0].strip()
440 end_addr = line.split("-")[1].strip()
441 start_num = int(ipaddr.IPv4Address(start_addr))
442 end_num = int(ipaddr.IPv4Address(end_addr))
443 num_ips = end_num - start_num + 1
444 block = [start_addr, num_ips]
445 entry = True
446 version = "4"
447 except Exception, e:
448 if self.verbose:
449 print repr(e), line
450 elif not entry and "inet6num:" in line:
451 try:
452 block = line.replace("inet6num:", \
453 "").strip().split("/")
454 entry = True
455 version = "6"
456 except Exception, e:
457 if self.verbose:
458 print repr(e), line
459 elif entry and "country:" in line:
460 country = line.replace("country:", "").strip()
461 data = (country, block[0], block[1], version)
462 self.database_cache.insert_lir_delegation(data)
464 def create_db_and_insert_delegation_into_db(self, delegation_urls):
465 self.database_cache.create_sql_database()
466 delegations = self.load_all_delegations(delegation_urls)
467 rows = []
468 for delegation in delegations:
469 for entry in delegation:
470 registry = str(entry['registry'])
471 if not registry.isdigit() and str(entry['cc']) != "*":
472 temp_row = [entry['registry'], entry['cc'], \
473 entry['start'], entry['value'], \
474 entry['date'], entry['status'], entry['type']]
475 rows.append(temp_row)
476 self.database_cache.insert_into_sql_database(rows)
478 class Lookup:
479 def __init__(self, cache_dir, database_cache, verbose=False):
480 self.cache_dir = cache_dir
481 self.database_cache = database_cache
482 self.verbose = verbose
483 self.map_co = None
484 self.build_country_code_dictionary()
486 def build_country_code_dictionary(self):
487 """ Return a dictionary mapping country name to the country
488 code. """
489 if not os.path.exists(self.cache_dir + "countrycodes.txt"):
490 return
491 self.map_co = {}
492 txt_file = str(self.cache_dir) + "countrycodes.txt"
493 for line in open(txt_file, 'r'):
494 line = line.replace("\n", "").replace("\r", "")
495 if line.startswith("This list states the country"):
496 continue
497 if line == "" or ";" not in line:
498 continue
499 name, code = line.split(";")
500 """ capitalize the individual parts of the country name """
501 name = ' '.join([part.capitalize() for part in \
502 name.split(" ")])
503 self.map_co[name] = code
505 def knows_country_names(self):
506 return self.map_co is not None
508 def get_name_from_country_code(self, cc_code):
509 if not self.knows_country_names():
510 return
511 country_name = [(key, value) for (key, value) in \
512 self.map_co.items() if value == cc_code]
513 if len(country_name) > 0:
514 return country_name[0][0]
516 def get_country_code_from_name(self, country_name):
517 """ Return the country code for a given country name. """
518 if not self.knows_country_names():
519 return
520 cc_code = [self.map_co[key] for key in self.map_co.keys() if \
521 key.upper().startswith(country_name.upper())]
522 if len(cc_code) > 0:
523 return cc_code[0]
525 def geoip_lookup(self, ip_addr):
526 # This would work with the CVS version of the GeoIP code
527 # However, MaxMind hasn't done a release in a long time.
528 # http://geoip.cvs.sourceforge.net/viewvc/geoip/python/\
529 # test_v6.py?revision=1.1&view=markup
530 # gi = GeoIP.open(self.cache_dir + \
531 # "GeoIPv6.dat", GeoIP.GEOIP_STANDARD)
532 # cc = gi.country_code_by_addr_v6(ip_addr)
533 # cc_name = gi.country_name_by_addr_v6(ip_addr)
534 gi = GeoIP.open(self.cache_dir + "GeoIP.dat", \
535 GeoIP.GEOIP_STANDARD)
536 cc = gi.country_code_by_addr(ip_addr)
537 cc_name = gi.country_name_by_addr(ip_addr)
538 return cc, cc_name
540 def lookup_ipv6_address(self, ip_addr):
541 print "Reverse lookup for: " + ip_addr
542 split_addr = ip_addr.split(":")
543 for i in ["RIR", "LIR"]:
544 ip_query = ip_addr.split(":")[0] + ":" + \
545 ip_addr.split(":")[1] + "%"
546 cc = self.database_cache.rir_or_lir_lookup_ipv6(ip_addr, \
547 ip_query, i)
548 if cc:
549 print i, "country code:", cc
550 cn = self.get_name_from_country_code(cc)
551 if cn:
552 print i, "country name:", cn
553 else:
554 ip_query = ip_addr.split(":")[0] + ":%"
555 cc = self.database_cache.rir_or_lir_lookup_ipv6(ip_addr, \
556 ip_query, i)
557 print i, "country code:", cc
558 cn = self.get_name_from_country_code(cc)
559 if cn:
560 print i, "country name:", cn
562 def lookup_ipv4_address(self, ip_addr):
563 print "Reverse lookup for: " + ip_addr
564 if GeoIP:
565 geoip_cc, geoip_cc_name = self.geoip_lookup(ip_addr)
566 print "GeoIP country code: " + str(geoip_cc)
567 print "GeoIP country name: " + str(geoip_cc_name)
568 rir_cc = self.database_cache.rir_lookup(ip_addr)
569 if rir_cc:
570 print 'RIR country code:', rir_cc
571 rir_cn = self.get_name_from_country_code(rir_cc)
572 if rir_cn:
573 print 'RIR country:', rir_cn
574 else:
575 print 'Not found in RIR db'
576 lir_cc = self.database_cache.lir_lookup(ip_addr)
577 if lir_cc:
578 print 'LIR country code:', lir_cc
579 lir_cn = self.get_name_from_country_code(lir_cc)
580 if lir_cn:
581 print 'LIR country:', lir_cn
582 if GeoIP:
583 if geoip_cc != rir_cc:
584 print "It appears that the RIR data conflicts with the " \
585 "GeoIP data. The GeoIP data is likely closer " \
586 "to being correct due to sub-delegation issues " \
587 "with LIR databases."
589 def lookup_ip_address(self, ip_addr):
590 """ Return the country code and name for a given ip address.
591 Attempts to use GeoIP if available. """
592 try:
593 lookup_ipaddr = ipaddr.IPAddress(ip_addr)
594 if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
595 self.lookup_ipv4_address(ip_addr)
596 elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
597 self.lookup_ipv6_address(ip_addr)
598 else:
599 print "Did not recognize '%s' as either IPv4 or IPv6 " \
600 "address." % ip_addr
601 except ValueError, e:
602 print "'%s' is not a valid IP address." % ip_addr
604 def asn_lookup(self, asn):
605 asn_cc = self.database_cache.asn_lookup(asn)
606 if asn_cc:
607 print "AS country code: %s" % asn_cc
608 asn_cn = self.get_name_from_country_code(asn_cc)
609 if asn_cn:
610 print "AS country name: %s" % asn_cn
611 else:
612 print "AS%s not found!" % asn
614 def fetch_rir_blocks_by_country(self, request, country):
615 return self.database_cache.use_sql_database(request, country)
617 def main():
618 """ Where the magic starts. """
619 usage = "Usage: %prog [options]\n\n" \
620 "Example: %prog -v -t mm"
621 parser = optparse.OptionParser(usage)
622 parser.add_option("-v", "--verbose", action="store_true", \
623 dest="verbose", help = "be verbose", default=False)
624 parser.add_option("-c", "--cache-dir", action="store", dest="dir", \
625 help="set cache directory [default: %default]", \
626 default=str(os.path.expanduser('~')) + "/.blockfinder/")
627 parser.add_option("--user-agent", action="store", dest="ua", \
628 help=('provide a User-Agent which will be used when '
629 'fetching delegation files [default: "%default"]'), \
630 default="Mozilla/5.0")
631 parser.add_option("-x", "--hack-the-internet", action="store_true", \
632 dest="hack_the_internet", help=optparse.SUPPRESS_HELP)
633 group = optparse.OptionGroup(parser, "Cache modes",
634 "Pick at most one of these modes to initialize or update " \
635 "the local cache. May not be combined with lookup modes.")
636 group.add_option("-i", "--init-rir", \
637 action="store_true", dest="init_del", \
638 help="initialize or update delegation information")
639 group.add_option("-d", "--reload-rir", action="store_true", \
640 dest="reload_del", \
641 help="use existing delegation files to update the database")
642 group.add_option("-l", "--init-lir", action="store_true", \
643 dest="init_lir",
644 help=("initialize or update lir information; can take up to "
645 "5 minutes"))
646 group.add_option("-z", "--reload-lir", action="store_true",
647 dest="reload_lir", \
648 help=("use existing lir files to update the database; can "
649 "take up to 5 minutes"))
650 group.add_option("-o", "--download-cc", action="store_true",
651 dest="download_cc", help="download country codes file")
652 parser.add_option_group(group)
653 group = optparse.OptionGroup(parser, "Lookup modes",
654 "Pick at most one of these modes to look up data in the " \
655 "local cache. May not be combined with cache modes.")
656 group.add_option("-4", "--ipv4", action="store", dest="ipv4", \
657 help=("look up country code and name for the specified IPv4 "
658 "address"))
659 group.add_option("-6", "--ipv6", action="store", dest="ipv6", \
660 help=("look up country code and name for the specified IPv6 "
661 "address"))
662 group.add_option("-a", "--asn", action="store", dest="asn", \
663 help="look up country code and name for the specified ASN")
664 group.add_option("-t", "--code", action="store", dest="cc", \
665 help=("look up all allocations in the delegation cache for "
666 "the specified two-letter country code"))
667 group.add_option("-n", "--name", action="store", dest="cn", \
668 help=("look up all allocations in the delegation cache for "
669 "the specified full country name"))
670 parser.add_option_group(group)
671 group = optparse.OptionGroup(parser, "Network modes")
672 (options, args) = parser.parse_args()
673 if options.hack_the_internet:
674 print "all your bases are belong to us!"
675 sys.exit(0)
676 options_dict = vars(options)
677 modes = 0
678 for mode in ["init_del", "init_lir", "reload_del", "reload_lir",
679 "download_cc", "ipv4", "ipv6", "asn", "cc", "cn"]:
680 if options_dict.has_key(mode) and options_dict.get(mode):
681 modes += 1
682 if modes > 1:
683 parser.error("only 1 cache or lookup mode allowed")
684 elif modes == 0:
685 parser.error("must provide 1 cache or lookup mode")
686 database_cache = DatabaseCache(options.dir, options.verbose)
687 database_cache.connect_to_database()
688 downloader_parser = DownloaderParser(options.dir, database_cache, \
689 options.ua)
690 lookup = Lookup(options.dir, database_cache)
691 delegation_urls = """
692 ftp://ftp.arin.net/pub/stats/arin/delegated-arin-latest
693 ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest
694 ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest
695 ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
696 ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest
698 geoip_country_urls = """http://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz
699 http://geolite.maxmind.com/download/geoip/database/GeoIPv6.dat.gz"""
700 lir_urls = """ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
701 ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz"""
702 delegation_files = []
703 for url in delegation_urls.split():
704 filename = url.rpartition('/')
705 delegation_files.append(filename[-1])
706 downloader_parser.create_blockfinder_cache_dir()
707 if options.ipv4 or options.ipv6 or options.asn or options.cc \
708 or options.cn:
709 if downloader_parser.cache_is_dated(delegation_files):
710 print "Your delegation cache is older than 24 hours; you " \
711 "probably want to update it."
712 if options.asn:
713 lookup.asn_lookup(options.asn)
714 elif options.ipv4:
715 lookup.lookup_ip_address(options.ipv4)
716 elif options.ipv6:
717 lookup.lookup_ip_address(options.ipv6)
718 elif options.cc or options.cn:
719 country = None
720 if options.cc:
721 country = options.cc.upper()
722 elif not lookup.knows_country_names():
723 print "Need to download country codes first before looking " \
724 "up countries by name."
725 else:
726 country = lookup.get_country_code_from_name(options.cn)
727 if not country:
728 print "It appears your search did not match a country."
729 if country:
730 for request in ["ipv4", "ipv6", "asn"]:
731 print "\n".join(lookup.fetch_rir_blocks_by_country(\
732 request, country))
733 elif options.init_del or options.reload_del:
734 if options.init_del:
735 if GeoIP:
736 downloader_parser.update_geoip_cache(geoip_country_urls)
737 downloader_parser.update_delegation_cache(delegation_urls)
738 if options.verbose:
739 lookup.verify_cache(delegation_files)
740 downloader_parser.create_db_and_insert_delegation_into_db(\
741 delegation_urls)
742 elif options.init_lir or options.reload_lir:
743 if options.init_lir:
744 downloader_parser.update_lir_delegation_cache(lir_urls)
745 print "Extracting and inserting information from the lir files " \
746 "can take up to 5 minutes"
747 database_cache.create_or_replace_lir_table_in_db()
748 for fname in "ripe.db.inetnum ripe.db.inet6num".split():
749 downloader_parser.extract_info_from_lir_file_and_insert_into_sqlite(fname)
750 elif options.download_cc:
751 downloader_parser.download_country_code_file()
752 database_cache.commit_and_close_database()
# Standard script entry point: run main() only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()