@@ -1,17 +1,13 @@
 #!/usr/bin/env python2

 import sys
-from json import loads, dumps
+import json # from json import loads, dumps
 from urllib2 import urlopen, HTTPError
 from time import sleep

 user = None
 if len(sys.argv) == 2:
     user = sys.argv[1]
 else:
-    f = open('user.json', 'r')
-    user = loads(f.read())['user']
-    f.close()
+    f = open('user.json', 'r')
+    user = json.load(f)['user']
+    f.close()

 sub_section = 'comments'
 after = ''
@@ -20,19 +16,19 @@ init_url = 'http://www.reddit.com/user/{user}/comments/.json?after=%s'.format(us
 next_url = init_url % after

 try:
-    http = urlopen(next_url).read()
+    http = urlopen(next_url)
 except HTTPError:
     raise HTTPError("You seem to have given an invalid user")

 try:
-    json = loads(http)
+    reddit = json.load(http)
 except ValueError:
     raise ValueError("Failed to decode json.")

 datum = []
 while True:
-    after = json['data']['after']
-    children = json['data']['children']
+    after = reddit['data']['after']
+    children = reddit['data']['children']

     # This bit fills datum with the id (for removal) and the date (for saving recent posts)
     for child in children:
@@ -44,10 +40,10 @@ while True:
         break

     next_url = init_url % after
-    http = urlopen(next_url).read()
-    json = loads(http)
+    http = urlopen(next_url)
+    reddit = json.load(http)
     sleep(1) # don't want to hammer reddit to hard

 f = open('data.json', 'w')
-f.write(dumps(datum))
+json.dump(datum, f)
 f.close()
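For reference, a minimal standalone sketch of the pattern this diff adopts: instead of calling .read() on the response and decoding the string with json.loads, the file-like object returned by urllib2.urlopen is handed straight to json.load. This assumes Python 2 with urllib2, as in the patched script; the username in the URL is a placeholder, not one from the original code.

import json
from urllib2 import urlopen

# Placeholder URL; substitute a real reddit username.
url = 'http://www.reddit.com/user/example_user/comments/.json'

# urlopen() returns a file-like response object, so json.load()
# can decode it directly instead of json.loads(response.read()).
reddit = json.load(urlopen(url))
print reddit['data']['after']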