2 # -*- coding: utf-8 -*-
4 # For the people of Smubworld!
8 from optparse
import OptionParser
# Package metadata.
__program__ = 'blockfinder'
__url__ = 'https://github.com/ioerror/blockfinder/'
__author__ = 'Jacob Appelbaum <jacob@appelbaum.net>, David <db@d1b.org>'
__copyright__ = 'Copyright (c) 2010'
__license__ = 'See LICENSE for licensing information'
__version__ = '3.1415'
27 from future
import antigravity
37 import ipaddr
as Ipaddr
def ip_address_to_dec(ip_addr):
    """Convert a dotted-quad IPv4 string (e.g. "1.2.3.4") to its integer value.

    The original built a hex string from the octets and parsed it back
    with the Python-2-only long(); plain shift-and-add arithmetic gives
    the same value (ints auto-promote to long in Python 2) without the
    string round-trip.
    """
    value = 0
    for octet in ip_addr.split('.'):
        # Shift the accumulator one byte left and append the next octet.
        value = (value << 8) + int(octet)
    return value
def return_first_ip_and_number_in_inetnum(line):
    """Parse an inetnum range line of the form "start - end".

    Returns a (start_ip, ip_count) pair, where ip_count is inclusive
    of both endpoints.
    """
    pieces = line.split("-")
    start_ip = pieces[0].strip()
    end_ip = pieces[1].strip()
    span = ip_address_to_dec(end_ip) - ip_address_to_dec(start_ip)
    return start_ip, span + 1
52 def ipv4_address_valid(ip_addr
):
53 ipv4arr
= ip_addr
.split('.')
def calculate_ipv4_subnet(host_count):
    """Return the CIDR prefix length that covers host_count addresses.

    For example 256 addresses -> 24 (/24), 2 addresses -> 31.

    Fix: the original formula returned 31 for a single address because
    len(bin(0)) is 3 ('0b0'); a one-address block is a /32.
    """
    if host_count <= 1:
        return 32
    # len(bin(x)) includes the '0b' prefix, so 34 - len == 32 - bit_length.
    return 34 - len(bin(host_count - 1))
67 class BlockFinderError(Exception):
def __init__(self, cache_dir, verbose=False):
    """Remember the cache directory path and verbosity flag for later DB calls."""
    self.verbose = verbose
    self.cache_dir = cache_dir
77 def connect_to_database(self
):
78 if not os
.path
.exists(self
.cache_dir
):
80 print "Initializing the cache directory..."
81 os
.mkdir(self
.cache_dir
)
82 self
.conn
= sqlite3
.connect(self
.cache_dir
+ "sqlitedb")
83 self
.cursor
= self
.conn
.cursor()
85 def commit_and_close_database(self
):
89 def create_sql_database(self
):
90 """ Creates a new sqlite database.
91 Existing delegation entries are dropped prior to inserting
92 'newer' delegations. """
93 sql
= ('DROP TABLE IF EXISTS delegations; '
94 'CREATE TABLE delegations(registry TEXT, cc TEXT, '
95 'start TEXT, value INTEGER, date TEXT, status TEXT, '
97 'CREATE TABLE IF NOT EXISTS lir_record(cc TEXT, '
98 'start TEXT, value INTEGER, type INTEGER)')
99 self
.cursor
.executescript(sql
)
def insert_into_sql_database(self, rows):
    """Bulk-insert delegation rows into the 'delegations' table.

    Each element of rows supplies the seven column values
    (registry, cc, start, value, date, status, type).
    """
    statement = ('INSERT INTO delegations (registry, cc, start, value, '
                 'date, status, type) VALUES (?, ?, ?, ?, ?, ?, ?)')
    self.cursor.executemany(statement, rows)
def _get_total_delegations_from_db(self):
    """Count every entry in the delegations table (ipv4, ipv6 and asn rows)."""
    self.cursor.execute('SELECT COUNT(*) FROM delegations')
    return int(self.cursor.fetchone()[0])
def _get_possible_match_entries(self, cc):
    """Count delegation rows whose country code matches.

    NOTE(review): cc is handed straight to cursor.execute as the
    parameter sequence, so callers appear to pass a 1-tuple — confirm.
    """
    query = 'SELECT COUNT(*) FROM delegations WHERE cc = ?'
    self.cursor.execute(query, cc)
    return int(self.cursor.fetchone()[0])
122 def use_sql_database(self
, request
, cc
):
123 """ Use the sqlite database that is created after fetching
124 delegations to output information for a given request. """
126 print "We have %d entries in our delegation cache." % \
127 self
._get
_total
_delegations
_from
_db
()
128 sql
= ('SELECT start, value FROM delegations WHERE type = ? '
131 self
.cursor
.execute(sql
, (request
, cc
[0]))
133 for row
in self
.cursor
:
134 if request
== "ipv4":
136 first
= Ipaddr
.IPv4Address(str(row
[0]))
137 last
= Ipaddr
.IPv4Address(int(first
) + int(row
[1]) \
139 result
+= [str(x
) for x
in \
140 Ipaddr
.summarize_address_range(first
, last
)]
142 result
.append(str(row
[0]) + "/" + \
143 str(calculate_ipv4_subnet(int(row
[1]))))
144 elif request
== "ipv6":
145 result
.append(str(row
[0]) + "/" + str(int(row
[1])))
147 result
.append(str(int(row
[0])))
150 result
.append("We found %d possible entries in our "
151 "delegation cache." % \
152 self
._get
_possible
_match
_entries
(cc
))
153 sql
= ('SELECT COUNT(*) FROM delegations WHERE cc = ? '
155 self
.cursor
.execute(sql
, (cc
[0], request
))
156 result
.append("We found %d matching entries in our "
157 "delegation cache." % int(self
.cursor
.fetchone()[0]))
160 def _rir_or_lir_lookup_ipv4(self
, ip_addr
, lookup_type
):
161 ipv4arr
= ip_addr
.split('.')
162 if lookup_type
== 'rir':
163 sql
= ('SELECT cc, start, value FROM delegations '
164 'WHERE type = "ipv4" AND start LIKE ?')
165 self
.cursor
.execute(sql
,
166 (ipv4arr
[0] + "." + ipv4arr
[1] + ".%",))
168 sql
= ('SELECT cc, start, value FROM lir_record '
169 'WHERE start LIKE ? AND type = 4')
170 self
.cursor
.execute(sql
,
171 (ipv4arr
[0] + "." + ipv4arr
[1] + ".%",))
172 row
= self
.cursor
.fetchone()
174 if lookup_type
== "rir":
175 sql
= ('SELECT cc, start, value FROM delegations '
176 'WHERE type = "ipv4" AND start LIKE ?')
177 self
.cursor
.execute(sql
, (ipv4arr
[0] + ".%",))
179 sql
= ('SELECT cc, start, value FROM lir_record '
180 'WHERE start LIKE ? AND type = 4')
181 self
.cursor
.execute(sql
, (ipv4arr
[0] + ".%",))
182 row
= self
.cursor
.fetchone()
183 while(row
is not None):
184 if (ip_address_to_dec(row
[1]) <= ip_address_to_dec(ip_addr
) \
185 < (ip_address_to_dec(row
[1]) + row
[2])):
187 row
= self
.cursor
.fetchone()
def rir_lookup(self, ip_addr):
    """Resolve ip_addr against the RIR delegation data."""
    return self._rir_or_lir_lookup_ipv4(ip_addr, "rir")
def lir_lookup(self, ip_addr):
    """Resolve ip_addr against the LIR delegation data."""
    return self._rir_or_lir_lookup_ipv4(ip_addr, "lir")
195 def asn_lookup(self
, asn
):
196 sql
= ('SELECT cc FROM delegations WHERE type = "asn" AND '
198 self
.cursor
.execute(sql
, (asn
,))
199 row
= self
.cursor
.fetchone()
203 def rir_or_lir_lookup_ipv6(self
, ip_addr
, ip_query
, type_q
):
205 sql
= ('SELECT cc, start, value FROM delegations '
206 'WHERE type = "ipv6" AND start like ?')
207 self
.cursor
.execute(sql
, (ip_query
,))
209 sql
= ('SELECT cc, start, value FROM lir_record '
210 'WHERE type = 6 AND start LIKE ?')
211 self
.cursor
.execute(sql
, (ip_query
,))
212 for row
in self
.cursor
:
214 if IPy
and ip_addr
in IPy
.IP(row
[1] + "/" + str(row
[2])):
216 except ValueError, e
:
def create_or_replace_lir_table_in_db(self):
    """Drop any existing lir_record table and create a fresh, empty one."""
    for statement in ('DROP TABLE IF EXISTS lir_record',
                      'CREATE TABLE IF NOT EXISTS lir_record(cc TEXT, '
                      'start TEXT, value INTEGER, type INTEGER)'):
        self.cursor.execute(statement)
def insert_lir_delegation(self, data):
    """Insert a single LIR delegation row; data = (cc, start, value, type)."""
    statement = ('INSERT INTO lir_record (cc, start, value, type) '
                 'VALUES (?, ?, ?, ?)')
    self.cursor.execute(statement, data)
235 class DownloaderParser
:
236 def __init__(self
, cache_dir
, database_cache
, user_agent
, \
238 self
.cache_dir
= cache_dir
239 self
.database_cache
= database_cache
240 self
.user_agent
= user_agent
241 self
.verbose
= verbose
def update_progress_bar(self, percent_done, caption=""):
    """Draw a one-line progress bar on the controlling terminal.

    percent_done -- fraction complete, expected in [0, 1]
    caption      -- short text drawn after the bar

    Fixes a resource leak: the original never closed the os.popen()
    pipe used to query the terminal size via stty.
    """
    size_pipe = os.popen('stty size', 'r')
    try:
        # stty reports "rows columns"; only the width matters here.
        _, columns = map(int, size_pipe.read().split())
    finally:
        size_pipe.close()
    width = columns - 4 - len(caption)
    filled = int(percent_done * width)
    # \x1b[G returns the cursor to column 0 so the next call overdraws.
    sys.stdout.write("[%s>%s] %s\x1b[G" % (
        "=" * filled,
        "." * (width - filled), caption))
# XXX TODO: allow the use of a proxy
254 # Set up a proper Request object, set the user agent and if desired,
256 def fetch(self
, url
):
257 """ Fetch (with progress meter) and return the contents of a
259 req
= urllib2
.Request(url
)
260 req
.add_header('User-Agent', self
.user_agent
)
261 #req.set_proxy(host, type)
262 fetcher
= urllib2
.urlopen(req
)
263 length_header
= fetcher
.headers
.get("Content-Length")
264 if length_header
== None:
265 """ The server did not provide a Content-Length header. """
267 length
= int(length_header
)
268 print "Fetching ", str(round(float(length
/1024),2)), " kilobytes"
270 t_start
= time
.time()
272 t_delta
= time
.time() - t_start
275 if length_header
!= -1:
276 self
.update_progress_bar(float(len(ret
)) / length
,
277 "%.2f K/s" % (len(ret
) / 1024 / t_delta
))
278 tmp
= fetcher
.read(1024)
280 if len(ret
) != length
and length_header
!= -1:
281 raise Exception("Expected %s bytes, only received " \
282 "%s" % (len(ret
), length
))
287 def write_to_a_text_file(self
, file_loc
, data
):
288 f
= open(file_loc
, 'w')
292 def extract_data_from_gzip_file(self
, gzip_file_loc
, \
294 gzip_file
= gzip
.open(gzip_file_loc
, 'rb')
295 gunzipped_file
= open(extract_file_loc
, 'w')
297 gunzipped_data
= gzip_file
.read(1024)
298 if gunzipped_data
== "":
300 gunzipped_file
.writelines(gunzipped_data
)
302 gunzipped_file
.close()
304 def read_data_from_binary_file(self
, fname
):
305 f
= open(fname
, 'rb')
310 def create_blockfinder_cache_dir(self
):
311 if not os
.path
.exists(self
.cache_dir
):
313 print "Initializing the cache directory..."
314 os
.mkdir(self
.cache_dir
)
316 def cache_delegation(self
, delegation_url
):
317 """ Attempt to cache the contents of a delegation url in our
320 print "Fetching " + delegation_url
321 delegation
= self
.fetch(delegation_url
)
322 tmp
= delegation_url
.split('/')
323 delegation_file
= str(self
.cache_dir
) + str(tmp
[-1])
325 self
.write_to_a_text_file(delegation_file
, delegation
)
331 def cache_is_dated(self
, cached_files
):
332 """ Returns True if the mtime of any files in cache dir is
335 os
.stat(self
.cache_dir
)
337 print "\nDid you initialize the cache directory?\n"
339 for file in cached_files
:
340 fstat
= os
.stat(self
.cache_dir
+ file)
341 if (time
.time() - fstat
.st_mtime
) > 86400:
345 def get_md5_from_delegation_md5_file(self
, delegation_file
):
346 """ Returns the md5sum from the delegation md5 file
347 if it doesn't exist it returns an empty string"""
350 f
= open(self
.cache_dir
+ delegation_file
+ ".md5", "r")
354 pos
= checksum
.find("=") +2
355 checksum
= str(checksum
[pos
:-1])
360 def verify_delegation_file(self
, delegation_file
):
361 """ Compares the delegation file md5sum to that of the provided
362 md5sum, returns True if they match otherwise returns
365 checksum_of_file
= ""
367 data
= self
.read_data_from_binary_file(self
.cache_dir
+ \
369 checksum_of_file
= str(hashlib
.md5(data
).hexdigest())
372 checksum
= self
.get_md5_from_delegation_md5_file(delegation_file
)
373 if checksum
!= checksum_of_file
:
375 if checksum
== checksum_of_file
and checksum
!= "":
379 def verify_cache(self
, delegation_files
):
380 """ If in verbose mode prints the result of checking the checksum
381 of the delegation files. """
382 for file in delegation_files
:
384 print "verifying " + file
385 if self
.verify_delegation_file(file):
387 print "the md5 checksum of " + file + \
388 " *matches* the provided checksum"
391 print "the md5 checksum of " + file + \
392 " does *not* match the provided checksum"
394 def update_delegation_cache(self
, delegation_urls
):
395 """ Fetch multiple delegation urls and cache the contents. """
396 print "Updating delegation cache..."
397 for url
in delegation_urls
.split():
398 self
.cache_delegation(url
+ ".md5")
399 if self
.verify_delegation_file(url
.rpartition('/')[-1]):
402 self
.cache_delegation(url
)
404 def update_lir_delegation_cache(self
, delegation_urls
):
405 """ Fetch multiple LIR delegation urls and cache the contents. """
406 print "Updating LIR delegation cache..."
407 for url
in delegation_urls
.split():
408 self
.cache_delegation(url
)
409 self
.unpack_a_delegation_cache(delegation_urls
, "LIR")
411 def unpack_a_delegation_cache(self
, delegation_urls
, del_type
=""):
412 """ Unpack the fetched LIR delegation files into the blockfinder
414 # This probably should unlink the gzip'ed file if we care about
416 for url
in delegation_urls
.split():
417 gzip_filename
= url
.rpartition('/')[-1]
418 gunziped_filename
= gzip_filename
.rpartition('.')[0]
420 print "Unpacking " + del_type
+ "file " + \
421 gzip_filename
+ " into our cache as " + \
423 self
.extract_data_from_gzip_file(self
.cache_dir
+ \
424 gzip_filename
, self
.cache_dir
+ gunziped_filename
)
426 def update_geoip_cache(self
, geoip_urls
):
427 """ Fetch country level resolution GeoIP files from a given url
428 and cache the contents. Unpack it if it's compressed. """
429 print "Updating GeoIP cache..."
430 for url
in geoip_urls
.split():
431 self
.cache_delegation(url
)
432 self
.unpack_a_delegation_cache(geoip_urls
, "GeoIP")
434 def load_delegation(self
, delegation_file
):
435 """ Load, parse and store the delegation file contents as a
437 keys
= "registry cc type start value date status"
439 f
= open(delegation_file
, "r")
440 delegations
= [dict((k
,v
) for k
,v
in zip(keys
.split(), \
441 line
.strip().split("|"))) \
442 for line
in f
.readlines() if not line
.startswith("#")]
448 def load_all_delegations(self
, delegation_urls
):
449 """ Load all delegations into memory. """
451 for url
in delegation_urls
.split():
452 filename
= url
.rpartition('/')[-1]
454 print "Attempting to load delegation file into " \
455 + "memory: " + filename
456 delegations
.append(self
.load_delegation(self
.cache_dir
+ \
460 def download_country_code_file(self
):
461 """ Download and save the latest opencountrycode
462 TXT(';'-separated) file """
463 url
= "http://www.iso.org/iso/list-en1-semic-3.txt"
464 text_content
= urllib2
.urlopen(url
).read()
465 self
.write_to_a_text_file(self
.cache_dir
+ "countrycodes.txt", \
468 def extract_info_from_lir_file_and_insert_into_sqlite(self
, filename
):
473 for line
in open(self
.cache_dir
+ filename
, "r"):
474 line
= line
.replace("\n", "")
477 country
, block
, version
= "", [], ""
478 elif not entry
and "inetnum:" in line
:
480 line
= line
.replace("inetnum:", "").strip()
481 start_ip
, num_ips
= \
482 return_first_ip_and_number_in_inetnum(line
)
483 block
= [start_ip
, num_ips
]
489 elif not entry
and "inet6num:" in line
:
491 block
= line
.replace("inet6num:", \
492 "").strip().split("/")
498 elif entry
and "country:" in line
:
499 country
= line
.replace("country:", "").strip()
500 data
= (country
, block
[0], block
[1], version
)
501 self
.database_cache
.insert_lir_delegation(data
)
503 def create_db_and_insert_delegation_into_db(self
, delegation_urls
):
504 self
.database_cache
.create_sql_database()
505 delegations
= self
.load_all_delegations(delegation_urls
)
507 for delegation
in delegations
:
508 for entry
in delegation
:
509 registry
= str(entry
['registry'])
510 if not registry
.isdigit() and str(entry
['cc']) !="*":
511 temp_row
= [entry
['registry'], entry
['cc'], \
512 entry
['start'], entry
['value'], \
513 entry
['date'], entry
['status'], entry
['type']]
514 rows
.append(temp_row
)
515 self
.database_cache
.insert_into_sql_database(rows
)
def __init__(self, cache_dir, database_cache, verbose=False):
    """Keep references to the cache directory, the database wrapper
    and the verbosity flag for the lookup methods."""
    self.verbose = verbose
    self.database_cache = database_cache
    self.cache_dir = cache_dir
523 def build_country_code_dictionary(self
):
524 """ Return a dictionary mapping country name to the country
527 txt_file
= str(self
.cache_dir
) + "countrycodes.txt"
528 for line
in open(txt_file
, 'r'):
529 line
= line
.replace("\n", "").replace("\r", "")
530 if line
.startswith("This list states the country"):
532 if line
== "" or ";" not in line
:
534 name
, code
= line
.split(";")
535 """ capitalize the individual parts of the country name """
536 name
= ' '.join([part
.capitalize() for part
in \
541 def get_name_from_country_code(self
, cc_code
):
542 map_co
= self
.build_country_code_dictionary()
543 country_name
= [(key
, value
) for (key
, value
) in map_co
.items() \
545 if len(country_name
) > 0:
546 return country_name
[0][0]
548 def get_country_code_from_name(self
, country_name
):
549 """ Return the country code for a given country name. """
550 map_co
= self
.build_country_code_dictionary()
551 cc_code
= [map_co
[key
] for key
in map_co
.keys() if \
552 key
.upper().startswith(country_name
.upper())]
556 def geoip_lookup(self
, ip_addr
):
557 # This would work with the CVS version of the GeoIP code
558 # However, MaxMind hasn't done a release in a long time.
559 # http://geoip.cvs.sourceforge.net/viewvc/geoip/python/\
560 # test_v6.py?revision=1.1&view=markup
561 # gi = GeoIP.open(self.cache_dir + \
562 # "GeoIPv6.dat",GeoIP.GEOIP_STANDARD)
563 # cc = gi.country_code_by_addr_v6(ip_addr)
564 # cc_name = gi.country_name_by_addr_v6(ip_addr)
565 gi
= GeoIP
.open(self
.cache_dir
+ "GeoIP.dat",GeoIP
.GEOIP_STANDARD
)
566 cc
= gi
.country_code_by_addr(ip_addr
)
567 cc_name
= gi
.country_name_by_addr(ip_addr
)
570 def lookup_ipv6_address(self
, ip_addr
):
571 print "Reverse lookup for: " + ip_addr
572 split_addr
= ip_addr
.split(":")
573 for i
in ["RIR", "LIR"]:
574 ip_query
= ip_addr
.split(":")[0] + ":" + \
575 ip_addr
.split(":")[1] + "%"
576 result
= self
.database_cache
.rir_or_lir_lookup_ipv6(ip_addr
, \
579 print i
, "Country Name:", \
580 self
.get_name_from_country_code(result
)
582 ip_query
= ip_addr
.split(":")[0] + ":%"
583 result
= self
.database_cache
.rir_or_lir_lookup_ipv6( \
584 ip_addr
, ip_query
, i
)
586 print i
, "Country Name:", \
587 self
.get_name_from_country_code(result
)
589 def lookup_ip_address(self
, ip_addr
):
590 """ Return the country code and name for a given ip address.
591 Attempts to use GeoIP if available."""
592 ip_addr
= socket
.getaddrinfo(ip_addr
, 80)[0][4][0]
593 if IPy
and IPy
.IP(ip_addr
).version() == 6:
594 self
.database_cache
.lookup_ipv6_address(ip_addr
)
596 if not ipv4_address_valid(ip_addr
):
597 raise BlockFinderError('Invalid ip address!')
598 print "Reverse lookup for: " + ip_addr
600 geoip_cc
, geoip_cc_name
= self
.geoip_lookup(ip_addr
)
601 print "GeoIP country code: " + str(geoip_cc
)
602 print "GeoIP country name: " + str(geoip_cc_name
)
603 rir_cc
= self
.database_cache
.rir_lookup(ip_addr
)
605 print 'RIR country code:', rir_cc
606 print 'RIR country:', self
.get_name_from_country_code(rir_cc
)
608 print 'Not found in RIR db'
609 lir_cc
= self
.database_cache
.lir_lookup(ip_addr
)
611 print 'LIR country code:', lir_cc
612 print 'LIR country:', self
.get_name_from_country_code(lir_cc
)
614 if geoip_cc
!= rir_cc
:
615 print "It appears that the RIR data conflicts with the " \
616 "GeoIP data. The GeoIP data is likely closer " \
617 "to being correct due to sub-delegation issues " \
618 "with LIR databases."
620 def asn_lookup(self
, asn
):
621 asn_cc
= self
.database_cache
.asn_lookup(asn
)
623 print "AS country code: %s" % asn_cc
624 print "AS country name: %s" % \
625 self
.get_name_from_country_code(asn_cc
)
627 print "AS%s not found!" % asn
def fetch_rir_blocks_by_country(self, request, country):
    """Delegate to the database cache to list allocation blocks of the
    given request type for a country."""
    cache = self.database_cache
    return cache.use_sql_database(request, country)
633 """ Where the magic starts. """
634 parser
= OptionParser()
635 parser
.add_option("-v", "--verbose", action
="store_true", \
636 dest
="verbose", help = "Be verbose", default
=False)
637 parser
.add_option("-c", "--cachedir", action
="store", \
638 dest
="cache_dir", help = "Set the cache directory", \
639 default
=str(os
.path
.expanduser('~')) + "/.blockfinder/")
640 parser
.add_option("-u","--useragent", action
="store",
642 help=('Provide a useragent which will be used when '
643 'fetching delegation files'), default
="Mozilla/5.0")
644 parser
.add_option("-4", "--ipv4", action
="store_true", dest
="ipv4", \
645 help="Search IPv4 allocations")
646 parser
.add_option("-6", "--ipv6", action
="store_true", dest
="ipv6", \
647 help="Search IPv6 allocations")
648 parser
.add_option("-a", "--asn", action
="store_true", dest
="asn", \
649 help="Search ASN allocations")
650 parser
.add_option("-t", "--nation-state", action
="store", dest
="cc", \
651 help="Set the country to search (given as a two-letter code)")
652 parser
.add_option("-n", "--country-name", action
="store", \
653 dest
="country_name", help="Set country to search (full name)")
654 parser
.add_option("-x", "--hack-the-internet", action
="store_true", \
655 dest
="hack_the_internet", help="Hack the internet")
656 parser
.add_option("-r", "--reverse-lookup", action
="store", \
657 dest
="reverse_host", \
658 help=("Return the country name for the specified IP or ASN "
660 parser
.add_option("-i", "--initialize-delegation", \
661 action
="store_true", dest
="init_del", \
662 help="Initialize or update delegation information")
663 parser
.add_option("-l", "--initialize-lir", action
="store_true", \
664 dest
="init_lir", help="Initialize or update lir information")
665 parser
.add_option("-d", "--reload-delegation", action
="store_true", \
667 help="Use existing delegation files to update the database")
668 parser
.add_option("-z", "--reload-lir", action
="store_true",
670 help="Use existing lir files to update the database")
671 (options
, args
) = parser
.parse_args()
673 verbose
= options
.verbose
674 user_agent
= options
.user_agent
675 cache_dir
= options
.cache_dir
676 database_cache
= DatabaseCache(cache_dir
, verbose
)
677 database_cache
.connect_to_database()
678 downloader_parser
= DownloaderParser(cache_dir
, database_cache
, \
680 lookup
= Lookup(cache_dir
, database_cache
)
682 delegation_urls
= """
683 ftp://ftp.arin.net/pub/stats/arin/delegated-arin-latest
684 ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest
685 ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest
686 ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest
687 ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest
689 geoip_country_urls
= """http://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz
690 http://geolite.maxmind.com/download/geoip/database/GeoIPv6.dat.gz"""
691 lir_urls
= """ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz
692 ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz"""
693 delegation_files
= []
694 for url
in delegation_urls
.split():
695 filename
= url
.rpartition('/')
696 delegation_files
.append(filename
[-1])
697 downloader_parser
.create_blockfinder_cache_dir()
698 if not os
.path
.exists(cache_dir
+ "countrycodes.txt"):
700 downloader_parser
.download_country_code_file()
703 if options
.hack_the_internet
:
704 print "all your bases are belong to us!"
706 if options
.asn
and options
.reverse_host
:
707 lookup
.asn_lookup(options
.reverse_host
)
709 if options
.reverse_host
:
710 lookup
.lookup_ip_address(options
.reverse_host
)
713 requests
.append("ipv4")
715 requests
.append("ipv6")
717 requests
.append("asn")
719 country
= options
.cc
.upper()
720 if options
.country_name
:
721 country
= lookup
.get_country_code_from_name(options
.country_name
)
722 if options
.reload_del
:
723 downloader_parser
.create_db_and_insert_delegation_into_db(\
729 downloader_parser
.update_geoip_cache(geoip_country_urls
)
730 downloader_parser
.update_delegation_cache(delegation_urls
)
732 lookup
.verify_cache(delegation_files
)
733 downloader_parser
.create_db_and_insert_delegation_into_db(\
735 if not options
.init_lir
:
737 if options
.init_lir
or options
.reload_lir
:
739 downloader_parser
.update_lir_delegation_cache(lir_urls
)
740 print "Extracting and inserting information from the lir files " \
741 "can take up to 5 minutes"
742 database_cache
.create_or_replace_lir_table_in_db()
743 for fname
in "ripe.db.inetnum ripe.db.inet6num".split():
744 downloader_parser
.extract_info_from_lir_file_and_insert_into_sqlite(fname
)
747 print "Nothing to do. Have you requested anything?"
748 print "Example usage: blockfinder -v --ipv4 -t mm"
751 print "It appears your search did not match a country."
753 # Check our cache age and warn if it's aged
754 if downloader_parser
.cache_is_dated(delegation_files
) and verbose
:
755 print "Your delegation cache is older than 24 hours; you " \
756 "probably want to update it."
758 print "Using country code: %s" % country
759 for request
in requests
:
762 print " \n".join(lookup
.fetch_rir_blocks_by_country(\
764 except sqlite3
.Error
, e
:
766 print "Please try reloading the database. (run " \
771 database_cache
.commit_and_close_database()
775 if __name__
== "__main__":