2 # -*- coding: utf-8 -*-
4 # For the people of Smubworld!
# --- Module metadata ----------------------------------------------------
# NOTE(review): the header/import lines surrounding this metadata block
# (original lines 20-26) are missing from this view of the file.
__program__ = 'blockfinder'
__url__ = 'https://github.com/ioerror/blockfinder/'
__author__ = 'Jacob Appelbaum <jacob@appelbaum.net>, David <db@d1b.org>'
__copyright__ = 'Copyright (c) 2010'
__license__ = 'See LICENSE for licensing information'
__version__ = '3.1415'
27 from future
import antigravity
def __init__(self, cache_dir, verbose=False):
    """Remember where the sqlite cache lives and how chatty to be.

    cache_dir -- directory path holding the sqlite database file
    verbose   -- when True, emit extra progress/diagnostic output
    """
    self.cache_dir, self.verbose = cache_dir, verbose
def connect_to_database(self):
    # Open (creating on first use) the sqlite database kept inside the
    # cache directory, keeping both the connection and a cursor on self.
    if not os.path.exists(self.cache_dir):
        # NOTE(review): original line 40 is missing from this view --
        # presumably an `if self.verbose:` guard around this message, so
        # the indentation here is a best guess.
        print "Initializing the cache directory..."
        os.mkdir(self.cache_dir)
    # The database file is literally named "sqlitedb"; cache_dir appears
    # to be expected to end with a path separator -- confirm at callers.
    self.conn = sqlite3.connect(self.cache_dir + "sqlitedb")
    self.cursor = self.conn.cursor()
def commit_and_close_database(self):
    # NOTE(review): the body of this method (original lines 47-48) is
    # missing from this view; it presumably commits self.conn and closes
    # the connection -- confirm against the upstream source.
def create_sql_database(self):
    """ Creates a new sqlite database.
    Existing delegation entries are dropped prior to inserting
    'newer' delegations. """
    # NOTE(review): original line 57 is missing from this view -- it
    # should close the delegations column list (likely "'type TEXT); '"),
    # so the SQL script below is truncated mid-statement.
    sql = ('DROP TABLE IF EXISTS delegations; '
           'CREATE TABLE delegations(registry TEXT, cc TEXT, '
           'start TEXT, value INTEGER, date TEXT, status TEXT, '
           'CREATE TABLE IF NOT EXISTS lir_record(cc TEXT, '
           'start TEXT, value INTEGER, type INTEGER)')
    # executescript runs several ';'-separated statements in one call.
    self.cursor.executescript(sql)
def insert_into_sql_database(self, rows):
    """Bulk-insert delegation rows into the delegations table.

    rows -- iterable of 7-tuples in the column order
            (registry, cc, start, value, date, status, type)
    """
    insert_stmt = ('INSERT INTO delegations (registry, cc, start, value, '
                   'date, status, type) VALUES (?, ?, ?, ?, ?, ?, ?)')
    self.cursor.executemany(insert_stmt, rows)
def _get_total_delegations_from_db(self):
    """ Returns the total count of the number of entries in the ipv4,
    ipv6 and asn table. """
    self.cursor.execute('SELECT COUNT(*) FROM delegations')
    return int(self.cursor.fetchone()[0])
def _get_possible_match_entries(self, cc):
    """Count the 'possible' matching delegation entries for a country.

    cc -- a one-element sequence holding the country code; it is passed
    straight through as the sqlite parameter list.
    """
    count_sql = 'SELECT COUNT(*) FROM delegations WHERE cc = ?'
    self.cursor.execute(count_sql, cc)
    return int(self.cursor.fetchone()[0])
def use_sql_database(self, request, cc):
    """ Use the sqlite database that is created after fetching
    delegations to output information for a given request. """
    # NOTE(review): many original lines are missing from this view (86,
    # 90-91, 93, 95, 103, 105-106, 111, 115-116), among them the
    # initialisation of `result`, the closing of both SQL literals, the
    # `if request == "ipv4":` head that the `elif` below belongs to, and
    # the final return.  The indentation below is a best guess --
    # reconstruct from upstream before changing any logic.
    print "We have %d entries in our delegation cache." % \
        self._get_total_delegations_from_db()
    sql = ('SELECT start, value FROM delegations WHERE type = ? '
    self.cursor.execute(sql, (request, cc[0]))
    for row in self.cursor:
        # row is (start, value): the first address of a block plus the
        # number of addresses it spans.
        start_ipaddr = ipaddr.IPv4Address(str(row[0]))
        end_ipaddr = start_ipaddr + int(row[1]) - 1
        result += [str(x) for x in \
                   ipaddr.summarize_address_range( \
                       start_ipaddr, end_ipaddr)]
        elif request == "ipv6":
            result.append(str(row[0]) + "/" + str(int(row[1])))
        result.append(str(int(row[0])))
    result.append("We found %d possible entries in our "
                  "delegation cache." % \
                  self._get_possible_match_entries(cc))
    sql = ('SELECT COUNT(*) FROM delegations WHERE cc = ? '
    self.cursor.execute(sql, (cc[0], request))
    result.append("We found %d matching entries in our "
                  "delegation cache." % int(self.cursor.fetchone()[0]))
def _rir_or_lir_lookup_ipv4(self, ip_addr, lookup_type):
    # Find the delegation row covering ip_addr, either in the RIR
    # `delegations` table or in the LIR `lir_record` table.
    # NOTE(review): original lines 124, 130, 135, 146 and 148 are missing
    # from this view -- almost certainly the `else:` heads pairing the
    # rir/lir queries, a no-row fallthrough, and the successful match
    # `return`.  Indentation here is reconstructed; verify upstream.
    ipv4arr = ip_addr.split('.')
    # First pass: narrow match on the first two octets.
    if lookup_type == 'rir':
        sql = ('SELECT cc, start, value FROM delegations '
               'WHERE type = "ipv4" AND start LIKE ?')
        self.cursor.execute(sql,
                            (ipv4arr[0] + "." + ipv4arr[1] + ".%",))
        sql = ('SELECT cc, start, value FROM lir_record '
               'WHERE start LIKE ? AND type = 4')
        self.cursor.execute(sql,
                            (ipv4arr[0] + "." + ipv4arr[1] + ".%",))
    row = self.cursor.fetchone()
    # Second, broader pass: match on the first octet only.
    if lookup_type == "rir":
        sql = ('SELECT cc, start, value FROM delegations '
               'WHERE type = "ipv4" AND start LIKE ?')
        self.cursor.execute(sql, (ipv4arr[0] + ".%",))
        sql = ('SELECT cc, start, value FROM lir_record '
               'WHERE start LIKE ? AND type = 4')
        self.cursor.execute(sql, (ipv4arr[0] + ".%",))
    row = self.cursor.fetchone()
    lookup_ipaddr = ipaddr.IPv4Address(ip_addr)
    while row is not None:
        # Each row is (cc, start, value): `value` addresses from `start`.
        start_ipaddr = ipaddr.IPv4Address(str(row[1]))
        end_ipaddr = start_ipaddr + int(row[2]) - 1
        if start_ipaddr <= lookup_ipaddr and \
                lookup_ipaddr <= end_ipaddr:
            # NOTE(review): the match action (original line 146,
            # presumably a `return` of row data) is missing here.
        row = self.cursor.fetchone()
def rir_lookup(self, ip_addr):
    """Resolve *ip_addr* against the RIR delegation data (IPv4)."""
    kind = "rir"
    return self._rir_or_lir_lookup_ipv4(ip_addr, kind)
def lir_lookup(self, ip_addr):
    """Resolve *ip_addr* against the LIR delegation data (IPv4)."""
    kind = "lir"
    return self._rir_or_lir_lookup_ipv4(ip_addr, kind)
def asn_lookup(self, asn):
    # Look up the country code an AS number was delegated to.
    # NOTE(review): original lines 157 and 160-161 are missing from this
    # view -- the tail of the SQL literal (which should close the WHERE
    # clause, likely "'start = ?')") and the return of row data.
    sql = ('SELECT cc FROM delegations WHERE type = "asn" AND '
    self.cursor.execute(sql, (asn,))
    row = self.cursor.fetchone()
def rir_or_lir_lookup_ipv6(self, ip_addr, ip_query, type_q):
    # Find which country an IPv6 address belongs to, searching either the
    # RIR delegations table or the LIR table (selected by type_q).
    # NOTE(review): original lines 164, 168 and 176 are missing from this
    # view -- presumably the `if type_q == ...:`/`else:` heads that pick
    # one of the two queries, and the `return` for a network match.
    sql = ('SELECT cc, start, value FROM delegations '
           'WHERE type = "ipv6" AND start like ?')
    self.cursor.execute(sql, (ip_query,))
    sql = ('SELECT cc, start, value FROM lir_record '
           'WHERE type = 6 AND start LIKE ?')
    self.cursor.execute(sql, (ip_query,))
    lookup_ipaddr = ipaddr.IPv6Address(ip_addr)
    for row in self.cursor:
        # row is (cc, prefix start, prefix length).
        network = ipaddr.IPv6Network(row[1] + "/" + str(row[2]))
        if lookup_ipaddr in network:
def create_or_replace_lir_table_in_db(self):
    """Recreate the lir_record table, discarding any existing rows."""
    self.cursor.execute('DROP TABLE IF EXISTS lir_record')
    self.cursor.execute('CREATE TABLE IF NOT EXISTS lir_record(cc TEXT, '
                        'start TEXT, value INTEGER, type INTEGER)')
def insert_lir_delegation(self, data):
    """Insert one LIR delegation row.

    data -- 4-tuple in the column order (cc, start, value, type)
    """
    self.cursor.execute('INSERT INTO lir_record (cc, start, value, type) '
                        'VALUES (?, ?, ?, ?)', data)
class DownloaderParser:
    # Downloads delegation / LIR / GeoIP files and feeds them into the
    # DatabaseCache.
    # NOTE(review): original line 194 (the last __init__ parameter,
    # presumably `verbose=False):`) is missing from this view, so the
    # signature below is truncated mid-list.
    def __init__(self, cache_dir, database_cache, user_agent, \
        self.cache_dir = cache_dir
        self.database_cache = database_cache
        self.user_agent = user_agent
        self.verbose = verbose
def update_progress_bar(self, percent_done, caption=""):
    """Write a progress bar to the console"""
    # Terminal geometry comes from stty; only the column count matters.
    rows, columns = map(int, \
        os.popen('stty size', 'r').read().split())
    width = columns - 4 - len(caption)
    done = int(percent_done * width)
    # "\x1b[G" returns the cursor to column 0 so the bar redraws in place.
    sys.stdout.write("[%s>%s] %s\x1b[G" % ("=" * done,
                                           "." * (width - done), caption))
# XXX TODO:allow the use of a proxy
# Set up a proper Request object, set the user agent and if desired,
def fetch(self, url):
    """ Fetch (with progress meter) and return the contents of a
    # NOTE(review): this method is heavily truncated in this view --
    # original lines 215, 223, 226, 228, 230-231, 236 and 240-242 are
    # missing (the docstring tail, the `ret` accumulator initialisation,
    # the read-loop head, the accumulation of `tmp`, and the final
    # return).  Reconstruct from upstream before editing.
    req = urllib2.Request(url)
    req.add_header('User-Agent', self.user_agent)
    #req.set_proxy(host, type)
    fetcher = urllib2.urlopen(req)
    length_header = fetcher.headers.get("Content-Length")
    if length_header == None:
        """ The server did not provide a Content-Length header. """
    length = int(length_header)
    print "Fetching ", str(round(float(length/1024),2)), " kilobytes"
    t_start = time.time()
    t_delta = time.time() - t_start
    if length_header != -1:
        self.update_progress_bar(float(len(ret)) / length,
                                 "%.2f K/s" % (len(ret) / 1024 / t_delta))
    tmp = fetcher.read(1024)
    # Sanity check: the byte count must match the advertised length.
    if len(ret) != length and length_header != -1:
        raise Exception("Expected %s bytes, only received " \
                        "%s" % (len(ret), length))
def write_to_a_text_file(self, file_loc, data):
    # Write `data` out to the text file at file_loc.
    # NOTE(review): original lines 246-247 (presumably f.write(data) and
    # f.close()) are missing from this view.
    f = open(file_loc, 'w')
def extract_data_from_gzip_file(self, gzip_file_loc, \
    # Stream-decompress gzip_file_loc into a plain file, 1 KiB at a time.
    # NOTE(review): original lines 250 (the final parameter, presumably
    # `extract_file_loc):`), 253 (the read-loop head), 256 (the loop
    # exit) and 258/260 are missing from this view.
    gzip_file = gzip.open(gzip_file_loc, 'rb')
    gunzipped_file = open(extract_file_loc, 'w')
    gunzipped_data = gzip_file.read(1024)
    if gunzipped_data == "":
    gunzipped_file.writelines(gunzipped_data)
    gunzipped_file.close()
def read_data_from_binary_file(self, fname):
    # Read and return the entire content of a binary file.
    # NOTE(review): original lines 263-265 (the read, close and return)
    # are missing from this view.
    f = open(fname, 'rb')
def create_blockfinder_cache_dir(self):
    # Create the cache directory if it does not exist yet.
    if not os.path.exists(self.cache_dir):
        # NOTE(review): original line 269 (likely an `if self.verbose:`
        # guard around this message) is missing from this view.
        print "Initializing the cache directory..."
        os.mkdir(self.cache_dir)
def cache_delegation(self, delegation_url):
    """ Attempt to cache the contents of a delegation url in our
    # NOTE(review): the docstring's closing lines (original 275-276) and
    # the method tail (original 281 and 283-287 -- likely error handling
    # and a return value) are missing from this view.
    print "Fetching " + delegation_url
    delegation = self.fetch(delegation_url)
    tmp = delegation_url.split('/')
    # Store the fetched content under its URL basename in the cache dir.
    delegation_file = str(self.cache_dir) + str(tmp[-1])
    self.write_to_a_text_file(delegation_file, delegation)
def cache_is_dated(self, cached_files):
    """ Returns True if the mtime of any files in cache dir is
    # NOTE(review): truncated -- original lines 290-291 (docstring end
    # plus a `try:`), 293 (its `except`), 295 and 299-300 (the returns)
    # are missing from this view; indentation is a best guess.
    os.stat(self.cache_dir)
    print "\nDid you initialize the cache directory?\n"
    for file in cached_files:
        fstat = os.stat(self.cache_dir + file)
        # "Dated" means older than 24 hours (86400 seconds).
        if (time.time() - fstat.st_mtime) > 86400:
def get_md5_from_delegation_md5_file(self, delegation_file):
    """ Returns the md5sum from the delegation md5 file
    if it doesn't exist it returns an empty string"""
    # NOTE(review): original lines 305-306, 308-310 and 313-315 are
    # missing from this view (the initialisation and read of `checksum`,
    # error handling, and the return).
    f = open(self.cache_dir + delegation_file + ".md5", "r")
    # The md5 file is apparently of the form "... = <hex>\n": take what
    # follows the '=' (skipping "= ") and drop the trailing newline.
    pos = checksum.find("=") +2
    checksum = str(checksum[pos:-1])
def verify_delegation_file(self, delegation_file):
    """ Compares the delegation file md5sum to that of the provided
    md5sum, returns True if they match otherwise returns
    # NOTE(review): truncated -- original lines 320-321, 323, 325,
    # 327-328, 331 and 333-334 are missing from this view (the docstring
    # end, a try/except around the file read, the read call's final
    # argument, and the boolean returns).
    checksum_of_file = ""
    data = self.read_data_from_binary_file(self.cache_dir + \
    checksum_of_file = str(hashlib.md5(data).hexdigest())
    checksum = self.get_md5_from_delegation_md5_file(delegation_file)
    # A match only counts when a non-empty reference checksum existed.
    if checksum != checksum_of_file:
    if checksum == checksum_of_file and checksum != "":
def verify_cache(self, delegation_files):
    """ If in verbose mode prints the result of checking the checksum
    of the delegation files. """
    # NOTE(review): original lines 340, 343 and 346-347 (apparently the
    # `if self.verbose:` guards and an `else:`) are missing from this
    # view; the indentation below is a best guess.
    for file in delegation_files:
        print "verifying " + file
        if self.verify_delegation_file(file):
            print "the md5 checksum of " + file + \
                " *matches* the provided checksum"
            print "the md5 checksum of " + file + \
                " does *not* match the provided checksum"
def update_delegation_cache(self, delegation_urls):
    """ Fetch multiple delegation urls and cache the contents. """
    # NOTE(review): original lines 357-358 are missing from this view --
    # they likely held the body of the `if` (e.g. `pass` or a verbose
    # print) plus an `else:`, meaning the payload may only be fetched
    # when verification fails.  Confirm before relying on this layout.
    print "Updating delegation cache..."
    for url in delegation_urls.split():
        # Fetch the .md5 companion first so the payload can be verified.
        self.cache_delegation(url + ".md5")
        if self.verify_delegation_file(url.rpartition('/')[-1]):
            self.cache_delegation(url)
361 def update_lir_delegation_cache(self
, delegation_urls
):
362 """ Fetch multiple LIR delegation urls and cache the contents. """
363 print "Updating LIR delegation cache..."
364 for url
in delegation_urls
.split():
365 self
.cache_delegation(url
)
366 self
.unpack_a_delegation_cache(delegation_urls
, "LIR")
def unpack_a_delegation_cache(self, delegation_urls, del_type=""):
    """ Unpack the fetched LIR delegation files into the blockfinder
    # This probably should unlink the gzip'ed file if we care about
    # NOTE(review): original lines 370, 372, 376 and 379 are missing from
    # this view (the docstring/comment tails, a likely verbosity guard,
    # and the final operand of the print continuation).
    for url in delegation_urls.split():
        gzip_filename = url.rpartition('/')[-1]
        # Drop the trailing ".gz" to name the unpacked file.
        gunziped_filename = gzip_filename.rpartition('.')[0]
        print "Unpacking " + del_type + "file " + \
            gzip_filename + " into our cache as " + \
        self.extract_data_from_gzip_file(self.cache_dir + \
            gzip_filename, self.cache_dir + gunziped_filename)
383 def update_geoip_cache(self
, geoip_urls
):
384 """ Fetch country level resolution GeoIP files from a given url
385 and cache the contents. Unpack it if it's compressed. """
386 print "Updating GeoIP cache..."
387 for url
in geoip_urls
.split():
388 self
.cache_delegation(url
)
389 self
.unpack_a_delegation_cache(geoip_urls
, "GeoIP")
def load_delegation(self, delegation_file):
    """ Load, parse and store the delegation file contents as a
    # NOTE(review): original lines 393, 395 and 400-404 (the docstring
    # end, f.close() and the return of the parsed list) are missing from
    # this view.
    keys = "registry cc type start value date status"
    f = open(delegation_file, "r")
    # Each non-comment line is pipe-separated; zip the fields against the
    # known column names to build one dict per delegation entry.
    delegations = [dict((k,v) for k,v in zip(keys.split(), \
                   line.strip().split("|"))) \
                   for line in f.readlines() if not line.startswith("#")]
def load_all_delegations(self, delegation_urls):
    """ Load all delegations into memory. """
    # NOTE(review): original lines 407 (the `delegations` list
    # initialisation), 410 (likely a verbosity guard), 414 (the append's
    # closing argument) and 415-416 (the return) are missing from this
    # view.
    for url in delegation_urls.split():
        filename = url.rpartition('/')[-1]
        print "Attempting to load delegation file into " \
            + "memory: " + filename
        delegations.append(self.load_delegation(self.cache_dir + \
def download_country_code_file(self):
    """ Download and save the latest opencountrycode
    TXT(';'-separated) file """
    # NOTE(review): original line 424 (the final argument of the write
    # call, presumably `text_content)`) is missing from this view.
    url = "http://www.iso.org/iso/list-en1-semic-3.txt"
    print "Fetching " + url
    text_content = self.fetch(url)
    self.write_to_a_text_file(self.cache_dir + "countrycodes.txt", \
def extract_info_from_lir_file_and_insert_into_sqlite(self, filename):
    # Parse a RIPE-style LIR database dump (inetnum/inet6num records) and
    # insert (cc, start, value, version) rows via the database cache.
    # NOTE(review): heavily truncated -- original lines 427-430, 433-434,
    # 437, 445-449, 451, 454-458 and 463 are missing from this view,
    # including the initialisation of `entry`, `country`, `block` and
    # `version`, the record-complete handling, and the `if` head the
    # first `elif` below pairs with.  Indentation is a best guess.
    for line in open(self.cache_dir + filename, "r"):
        line = line.replace("\n", "")
        country, block, version = "", [], ""
        elif not entry and "inetnum:" in line:
            # Convert the "start - end" address range into a start
            # address plus the count of addresses it contains.
            line = line.replace("inetnum:", "").strip()
            start_addr = line.split("-")[0].strip()
            end_addr = line.split("-")[1].strip()
            start_num = int(ipaddr.IPv4Address(start_addr))
            end_num = int(ipaddr.IPv4Address(end_addr))
            num_ips = end_num - start_num + 1
            block = [start_addr, num_ips]
        elif not entry and "inet6num:" in line:
            # IPv6 records carry a prefix "start/length" pair directly.
            block = line.replace("inet6num:", \
                "").strip().split("/")
        elif entry and "country:" in line:
            country = line.replace("country:", "").strip()
            data = (country, block[0], block[1], version)
            self.database_cache.insert_lir_delegation(data)
def create_db_and_insert_delegation_into_db(self, delegation_urls):
    # Rebuild the delegations table and load every delegation file's
    # entries into it in one bulk insert.
    # NOTE(review): original line 467 (apparently the `rows = []`
    # initialisation) is missing from this view.
    self.database_cache.create_sql_database()
    delegations = self.load_all_delegations(delegation_urls)
    for delegation in delegations:
        for entry in delegation:
            registry = str(entry['registry'])
            # Skip summary lines (numeric "registry" field) and entries
            # with the wildcard country code "*".
            if not registry.isdigit() and str(entry['cc']) != "*":
                temp_row = [entry['registry'], entry['cc'], \
                            entry['start'], entry['value'], \
                            entry['date'], entry['status'], entry['type']]
                rows.append(temp_row)
    self.database_cache.insert_into_sql_database(rows)
def __init__(self, cache_dir, database_cache, verbose=False):
    # Constructor of the lookup object (country-code resolution front
    # end).
    # NOTE(review): the enclosing class header (original line ~478) and
    # line 483 (presumably `self.map_co = None`, which other methods
    # test) are missing from this view.
    self.cache_dir = cache_dir
    self.database_cache = database_cache
    self.verbose = verbose
    self.build_country_code_dictionary()
def build_country_code_dictionary(self):
    """ Return a dictionary mapping country name to the country
    # NOTE(review): original lines 488, 490-491, 496, 498 and 502 are
    # missing from this view (the docstring end, an early exit when the
    # codes file is absent, the loop `continue`s, and the tail of the
    # capitalisation expression).  Indentation is a best guess.
    if not os.path.exists(self.cache_dir + "countrycodes.txt"):
    txt_file = str(self.cache_dir) + "countrycodes.txt"
    for line in open(txt_file, 'r'):
        line = line.replace("\n", "").replace("\r", "")
        # Skip the explanatory header line of the ISO list.
        if line.startswith("This list states the country"):
        if line == "" or ";" not in line:
        name, code = line.split(";")
        """ capitalize the individual parts of the country name """
        name = ' '.join([part.capitalize() for part in \
        self.map_co[name] = code
def knows_country_names(self):
    """True once the country name -> code dictionary has been loaded."""
    loaded = self.map_co is not None
    return loaded
def get_name_from_country_code(self, cc_code):
    # Reverse-map a two-letter country code to its country name using
    # the name -> code dictionary.
    # NOTE(review): original lines 510 and 515 are missing from this view
    # (the body of the guard below and, presumably, the not-found
    # fallback).
    if not self.knows_country_names():
    country_name = [(key, value) for (key, value) in \
                    self.map_co.items() if value == cc_code]
    if len(country_name) > 0:
        return country_name[0][0]
def get_country_code_from_name(self, country_name):
    """ Return the country code for a given country name. """
    # NOTE(review): original lines 519 and 522-524 are missing from this
    # view (the body of the guard below and the return of the first
    # match).
    if not self.knows_country_names():
    # Prefix match, case-insensitive, against all known country names.
    cc_code = [self.map_co[key] for key in self.map_co.keys() if \
               key.upper().startswith(country_name.upper())]
def geoip_lookup(self, ip_addr):
    # Resolve an IPv4 address via the MaxMind GeoIP country database in
    # the cache directory.
    # NOTE(review): original lines 538-539 (presumably
    # `return cc, cc_name` -- the caller unpacks two values) are missing
    # from this view.
    # This would work with the CVS version of the GeoIP code
    # However, MaxMind hasn't done a release in a long time.
    # http://geoip.cvs.sourceforge.net/viewvc/geoip/python/\
    # test_v6.py?revision=1.1&view=markup
    # gi = GeoIP.open(self.cache_dir + \
    # "GeoIPv6.dat", GeoIP.GEOIP_STANDARD)
    # cc = gi.country_code_by_addr_v6(ip_addr)
    # cc_name = gi.country_name_by_addr_v6(ip_addr)
    gi = GeoIP.open(self.cache_dir + "GeoIP.dat", \
        GeoIP.GEOIP_STANDARD)
    cc = gi.country_code_by_addr(ip_addr)
    cc_name = gi.country_name_by_addr(ip_addr)
def lookup_ipv6_address(self, ip_addr):
    # Print RIR and LIR country code/name results for an IPv6 address,
    # querying on the leading address groups.
    # NOTE(review): original lines 547-548, 551, 553, 556, 559 and 561
    # are missing from this view (the remaining arguments of the lookup
    # calls and the conditional/else structure around the fallback
    # query).  Indentation is a best guess.
    print "Reverse lookup for: " + ip_addr
    split_addr = ip_addr.split(":")
    for i in ["RIR", "LIR"]:
        # First query: match on the two leading groups.
        ip_query = ip_addr.split(":")[0] + ":" + \
            ip_addr.split(":")[1] + "%"
        cc = self.database_cache.rir_or_lir_lookup_ipv6(ip_addr, \
        print i, "country code:", cc
        cn = self.get_name_from_country_code(cc)
        print i, "country name:", cn
        # Fallback query: match on the first group only.
        ip_query = ip_addr.split(":")[0] + ":%"
        cc = self.database_cache.rir_or_lir_lookup_ipv6(ip_addr, \
        print i, "country code:", cc
        cn = self.get_name_from_country_code(cc)
        print i, "country name:", cn
def lookup_ipv4_address(self, ip_addr):
    # Print GeoIP, RIR and LIR country results for an IPv4 address and
    # warn when the GeoIP and RIR answers disagree.
    # NOTE(review): original lines 564, 569, 572, 574, 577, 580 and 582
    # are missing from this view (apparently a try/except around the
    # GeoIP call plus found/not-found guards between the prints);
    # indentation is a best guess.
    print "Reverse lookup for: " + ip_addr
    geoip_cc, geoip_cc_name = self.geoip_lookup(ip_addr)
    print "GeoIP country code: " + str(geoip_cc)
    print "GeoIP country name: " + str(geoip_cc_name)
    rir_cc = self.database_cache.rir_lookup(ip_addr)
    print 'RIR country code:', rir_cc
    rir_cn = self.get_name_from_country_code(rir_cc)
    print 'RIR country:', rir_cn
    print 'Not found in RIR db'
    lir_cc = self.database_cache.lir_lookup(ip_addr)
    print 'LIR country code:', lir_cc
    lir_cn = self.get_name_from_country_code(lir_cc)
    print 'LIR country:', lir_cn
    if geoip_cc != rir_cc:
        print "It appears that the RIR data conflicts with the " \
            "GeoIP data. The GeoIP data is likely closer " \
            "to being correct due to sub-delegation issues " \
            "with LIR databases."
def lookup_ip_address(self, ip_addr):
    """ Return the country code and name for a given ip address.
    Attempts to use GeoIP if available. """
    # NOTE(review): original lines 592 (the `try:` that pairs with the
    # `except` below), 598 (an `else:`) and 600 (the tail of the error
    # message literal) are missing from this view.
    lookup_ipaddr = ipaddr.IPAddress(ip_addr)
    if isinstance(lookup_ipaddr, ipaddr.IPv4Address):
        self.lookup_ipv4_address(ip_addr)
    elif isinstance(lookup_ipaddr, ipaddr.IPv6Address):
        self.lookup_ipv6_address(ip_addr)
    print "Did not recognize '%s' as either IPv4 or IPv6 " \
    except ValueError, e:
        print "'%s' is not a valid IP address." % ip_addr
def asn_lookup(self, asn):
    # Print the country code and name an AS number is registered to.
    # NOTE(review): original lines 606, 609 and 611 are missing from this
    # view (apparently the found/not-found conditional structure around
    # the prints); indentation is a best guess.
    asn_cc = self.database_cache.asn_lookup(asn)
    print "AS country code: %s" % asn_cc
    asn_cn = self.get_name_from_country_code(asn_cc)
    print "AS country name: %s" % asn_cn
    print "AS%s not found!" % asn
def fetch_rir_blocks_by_country(self, request, country):
    """Delegate the per-country block listing to the SQL cache layer."""
    cache = self.database_cache
    return cache.use_sql_database(request, country)
618 """ Where the magic starts. """
619 usage
= "Usage: %prog [options]\n\n" \
620 "Example: %prog -v -t mm"
621 parser
= optparse
.OptionParser(usage
)
622 parser
.add_option("-v", "--verbose", action
="store_true", \
623 dest
="verbose", help = "be verbose", default
=False)
624 parser
.add_option("-c", "--cache-dir", action
="store", dest
="dir", \
625 help="set cache directory [default: %default]", \
626 default
=str(os
.path
.expanduser('~')) + "/.blockfinder/")
627 parser
.add_option("--user-agent", action
="store", dest
="ua", \
628 help=('provide a User-Agent which will be used when '
629 'fetching delegation files [default: "%default"]'), \
630 default
="Mozilla/5.0")
631 parser
.add_option("-x", "--hack-the-internet", action
="store_true", \
632 dest
="hack_the_internet", help=optparse
.SUPPRESS_HELP
)
633 group
= optparse
.OptionGroup(parser
, "Cache modes",
634 "Pick at most one of these modes to initialize or update " \
635 "the local cache. May not be combined with lookup modes.")
636 group
.add_option("-i", "--init-rir", \
637 action
="store_true", dest
="init_del", \
638 help="initialize or update delegation information")
639 group
.add_option("-d", "--reload-rir", action
="store_true", \
641 help="use existing delegation files to update the database")
642 group
.add_option("-l", "--init-lir", action
="store_true", \
644 help=("initialize or update lir information; can take up to "
646 group
.add_option("-z", "--reload-lir", action
="store_true",
648 help=("use existing lir files to update the database; can "
649 "take up to 5 minutes"))
650 group
.add_option("-o", "--download-cc", action
="store_true",
651 dest
="download_cc", help="download country codes file")
652 parser
.add_option_group(group
)
653 group
= optparse
.OptionGroup(parser
, "Lookup modes",
654 "Pick at most one of these modes to look up data in the " \
655 "local cache. May not be combined with cache modes.")
656 group
.add_option("-4", "--ipv4", action
="store", dest
="ipv4", \
657 help=("look up country code and name for the specified IPv4 "
659 group
.add_option("-6", "--ipv6", action
="store", dest
="ipv6", \
660 help=("look up country code and name for the specified IPv6 "
662 group
.add_option("-a", "--asn", action
="store", dest
="asn", \
663 help="look up country code and name for the specified ASN")
664 group
.add_option("-t", "--code", action
="store", dest
="cc", \
665 help=("look up all allocations in the delegation cache for "
666 "the specified two-letter country code"))
667 group
.add_option("-n", "--name", action
="store", dest
="cn", \
668 help=("look up all allocations in the delegation cache for "
669 "the specified full country name"))
670 parser
.add_option_group(group
)
671 group
= optparse
.OptionGroup(parser
, "Network modes")
672 (options
, args
) = parser
.parse_args()
673 if options
.hack_the_internet
:
674 print "all your bases are belong to us!"
676 options_dict
= vars(options
)
678 for mode
in ["init_del", "init_lir", "reload_del", "reload_lir",
679 "download_cc", "ipv4", "ipv6", "asn", "cc", "cn"]:
680 if options_dict
.has_key(mode
) and options_dict
.get(mode
):
683 parser
.error("only 1 cache or lookup mode allowed")
685 parser
.error("must provide 1 cache or lookup mode")
686 database_cache
= DatabaseCache(options
.dir, options
.verbose
)
687 database_cache
.connect_to_database()
688 downloader_parser
= DownloaderParser(options
.dir, database_cache
, \
690 lookup
= Lookup(options
.dir, database_cache
)
691 delegation_urls
= """
692 ftp://ftp.arin.net/pub/stats/arin/delegated-arin-latest
693 ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest
694 ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest
695 ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
696 ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest
698 geoip_country_urls
= """http://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz
699 http://geolite.maxmind.com/download/geoip/database/GeoIPv6.dat.gz"""
700 lir_urls
= """ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
701 ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz"""
702 delegation_files
= []
703 for url
in delegation_urls
.split():
704 filename
= url
.rpartition('/')
705 delegation_files
.append(filename
[-1])
706 downloader_parser
.create_blockfinder_cache_dir()
707 if options
.ipv4
or options
.ipv6
or options
.asn
or options
.cc \
709 if downloader_parser
.cache_is_dated(delegation_files
):
710 print "Your delegation cache is older than 24 hours; you " \
711 "probably want to update it."
713 lookup
.asn_lookup(options
.asn
)
715 lookup
.lookup_ip_address(options
.ipv4
)
717 lookup
.lookup_ip_address(options
.ipv6
)
718 elif options
.cc
or options
.cn
:
721 country
= options
.cc
.upper()
722 elif not lookup
.knows_country_names():
723 print "Need to download country codes first before looking " \
724 "up countries by name."
726 country
= lookup
.get_country_code_from_name(options
.cn
)
728 print "It appears your search did not match a country."
730 for request
in ["ipv4", "ipv6", "asn"]:
731 print "\n".join(lookup
.fetch_rir_blocks_by_country(\
733 elif options
.init_del
or options
.reload_del
:
736 downloader_parser
.update_geoip_cache(geoip_country_urls
)
737 downloader_parser
.update_delegation_cache(delegation_urls
)
739 lookup
.verify_cache(delegation_files
)
740 downloader_parser
.create_db_and_insert_delegation_into_db(\
742 elif options
.init_lir
or options
.reload_lir
:
744 downloader_parser
.update_lir_delegation_cache(lir_urls
)
745 print "Extracting and inserting information from the lir files " \
746 "can take up to 5 minutes"
747 database_cache
.create_or_replace_lir_table_in_db()
748 for fname
in "ripe.db.inetnum ripe.db.inet6num".split():
749 downloader_parser
.extract_info_from_lir_file_and_insert_into_sqlite(fname
)
750 elif options
.download_cc
:
751 downloader_parser
.download_country_code_file()
752 database_cache
.commit_and_close_database()
if __name__ == "__main__":
    # NOTE(review): the guarded call (original line 755, presumably
    # `main()`) is missing from this view.