Есть скрипт для парсинга статистики URL'ов из логов Nginx'a:
#!/usr/bin/env python3.2 import sys urls = {} try: while 1: line = raw_input() line_arr = line.split(" ") try: host = line_arr[-1] host = host[:1] host = host[:-1] url = line_arr[6] t = float (line_arr[-2]) #print host, url, t try: urls[host + url] = (urls[host + url][0] + t, urls[host + url][1] + 1) except KeyError as e: urls[host + url] = (t, 1) except ValueError as e: pass except EOFError as e: pass def sort_by_value(d): """ Returns the keys of dictionary d sorted by their values """ items=d.items() backitems=[ [v[1],v[0]] for v in items] backitems.sort(reverse=True) return [backitems[i][1] for i in range (0,len(backitems))] if (len(sys.argv) > 1): f = open (sys.argv[1], 'r') for k in f.readlines(): k = k.strip() try: print ("urls[k][0], urls[k][1], urls[k][0] / urls[k][1], k") except: print ("0, 0, k") else: i = 0 for k in sort_by_value(urls): print ("urls[k][0], urls[k][1], urls[k][0] / urls[k][1], k") i +=1 if i > 100: break
При выполнении выдаёт:
line 50 ^ IndentationError: unexpected unindent