comparison paste/paste.3547 @ 0:e037173e0012

Initial import.
author HackBot
date Thu, 16 Feb 2012 19:42:32 +0000
parents
children
#!/usr/bin/env python

import os
import sys
import json
import urllib
import urllib2

# Route requests through the HTTP proxy from the environment, if one is set.
http_proxy = os.environ.get('http_proxy')
if http_proxy:
    proxy_handler = urllib2.ProxyHandler({'http': http_proxy})
    urllib2.install_opener(urllib2.build_opener(proxy_handler))
def lose():
    # Bail out when the command-line arguments are unusable.
    print 'You get NOTHING! You LOSE! Good DAY sir!'
    sys.exit()

def eels():
    # Bail out when no translation can be found.
    print 'My hovercraft is full of eels.'
    sys.exit()
# Accept either separate arguments or a single quoted string to split.
if len(sys.argv) > 2:
    args = sys.argv[1:]
elif len(sys.argv) == 2:
    args = sys.argv[1].split()
else:
    lose()

# Two arguments: source language and title, translating into English.
# Three arguments: source language, target language, title.
if len(args) == 2:
    from_lang = args[0]
    to_lang = 'en'
    word = args[1]
elif len(args) == 3:
    from_lang = args[0]
    to_lang = args[1]
    word = args[2]
else:
    lose()
def query(continue_id):
    # Ask the source-language Wikipedia for the page's language links,
    # following redirects. The title is URL-quoted so spaces and non-ASCII
    # characters survive the request.
    url = 'http://%s.wikipedia.org/w/api.php?format=json&action=query&' \
          'redirects=1&titles=%s&prop=langlinks' % (from_lang, urllib.quote(word))
    if continue_id:
        url += '&llcontinue=' + continue_id
    try:
        response = urllib2.urlopen(url).read()
    except urllib2.URLError, e:
        print e.reason
        sys.exit()
    return json.loads(response)

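# For reference, the script depends on the (pre-2015) MediaWiki API response
# shape sketched below. This is only an illustration reconstructed from the
# keys the code reads; the page id and titles are placeholders, not real data:
#
#   {"query": {"pages": {"<pageid>": {"langlinks": [{"lang": "en", "*": "..."}]}}},
#    "query-continue": {"langlinks": {"llcontinue": "<pageid>|<lang>"}}}
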
continue_id = None
while True:
    q = query(continue_id)
    # A page id of -1 means the title does not exist on that wiki.
    if '-1' in q['query']['pages']:
        eels()
    page = q['query']['pages'].values()[0]
    if 'langlinks' not in page:
        eels()
    for link in page['langlinks']:
        if link['lang'] == to_lang:
            print link['*'].encode('utf-8')
            sys.exit()
    # More language links than fit in one response: keep paging.
    if 'query-continue' in q:
        continue_id = q['query-continue']['langlinks']['llcontinue']
    else:
        eels()
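
# Example invocations (the script name is assumed from the repository path;
# the output is whatever langlink title the API returns):
#
#   ./paste.3547 fr Bonjour          # French article title -> English
#   ./paste.3547 fr de Bonjour       # French article title -> German
#   ./paste.3547 'fr de Bonjour'     # a single quoted string is split the same way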