Cleaned everything up for PRAW

pull/5/head
David Trail 13 years ago
parent 6029949ec6
commit 0b97644442

@ -3,13 +3,11 @@ Shreddit
Details
-----------
When you delete your account on Reddit, Reddit does nothing with your comment history other than
obscuring the author (replacing it with [deleted]), which may not be good enough for some.
Uses the reddit_api over at https://github.com/mellort/reddit_api to do all the heavy lifting.
Usage
-----------
- Add your Reddit details to user.json; the fields should be self-explanatory
- run `./schreddit`
- Just run `./schreddit`
Caveats
-----------

@ -1,46 +0,0 @@
#!/usr/bin/env python2
"""Fetch a Reddit user's comment history and save it to data.json.

Reads the username from user.json, pages through the public JSON
comment listing at reddit.com, and writes one record per comment
with the fields the deletion script needs: the fullname id, the
creation timestamp, the body text and the subreddit.
"""
from __future__ import with_statement
import sys
try:
    import json
except ImportError:
    import simplejson as json
from urllib2 import urlopen, HTTPError
from time import sleep

with open('user.json', 'r') as f:
    user = json.load(f)['user']

# The %s placeholder is filled with the pagination cursor ("after")
# on each request; an empty cursor fetches the first page.
init_url = 'http://www.reddit.com/user/{user}/comments/.json?after=%s'.format(user=user)

datum = []
after = ''
while True:
    reddit = json.load(urlopen(init_url % after))
    after = reddit['data']['after']
    # Collect the id (for removal) and the date (for keeping recent posts).
    for child in reddit['data']['children']:
        child_data = child['data']
        if 'id' in child_data:
            datum.append({
                'id': child_data['name'],  # fullname (t1_xxx), as /api/del expects
                'created': child_data['created'],
                'body': child_data['body'],
                'subreddit': child_data['subreddit']})
    if after is None:  # reddit returns null for "after" on the last page
        break
    sleep(1)  # throttle between pages to be polite to the API

with open('data.json', 'w') as f:
    json.dump(datum, f)

@ -1,67 +0,0 @@
#!/usr/bin/env python2
from __future__ import with_statement
try: import json
except ImportError: import simplejson as json
import sys, httplib, urllib
from datetime import datetime, timedelta
from time import sleep
## Get the data we need to log into the API
with open('user.json', 'r') as f:
data = json.load(f)
days = data['days']
user = data['user']
passwd = data['passwd']
## Load our json which should be all the user's history
with open('data.json', 'r') as f:
data = json.load(f)
# Every thing before this time will be deleted
before_time = datetime.now() - timedelta(days=days)
## Fill an array of IDs that are to be deleted
deletion_ids = [item for item in data if datetime.fromtimestamp(item['created']) < before_time]
if len(deletion_ids) == 0:
print "Couldn't find any posts to delete"
exit(0)
## This part logs you in.
headers = {
"Content-type": "application/x-www-form-urlencoded",
"User-Agent": "Shreddit"
}
conn = httplib.HTTPSConnection('ssl.reddit.com')
params = urllib.urlencode({
'user': user,
'passwd': passwd,
'api_type': 'json'})
conn.request("POST", "/api/login/%s" % user, params, headers)
http = conn.getresponse()
tmp = json.loads(http.read())['json']['data']
headers.update({'Cookie': 'reddit_session=%s' % tmp['cookie']})
modhash = tmp['modhash']
for dat in deletion_ids:
rid = dat['id']
time = datetime.fromtimestamp(dat['created']).date()
subreddit = dat['subreddit']
text = dat[u'body'][:20]
#print '{rid}: {time} {subreddit}: "{text}..."'.format(subreddit=subreddit, rid=rid, time=time, text=text)
# And now for the deleting
conn = httplib.HTTPConnection('www.reddit.com')
params = urllib.urlencode({
'id': rid,
'uh': modhash,
'api_type': 'json'})
#headers.update({"Content-Length": len(params)})
conn.request('POST', '/api/del', params, headers)
http = conn.getresponse()
if http.read() != '{}':
print '''Failed to delete "%s" (%s - %s - %s)''' % (text, rid, time, subreddit)
sleep(2)

@ -1 +0,0 @@
{"passwd": "", "user": "", "days": 7}
Loading…
Cancel
Save