diff --git a/README.rst b/README.rst
index 73cabbd..c91001e 100644
--- a/README.rst
+++ b/README.rst
@@ -3,13 +3,11 @@ Shreddit
 Details
 -----------
 
-When one deletes their account on Reddit it does nothing with their comment history other than
-obscure the author (replaces with [deleted]) which may not be good enough for some.
+Uses the reddit_api over at https://github.com/mellort/reddit_api to do all the heavy lifting.
 
 Usage
 -----------
 
-- Add your Reddit details to user.json, should be self explanatory
-- run `./schreddit`
+- Just run `./schreddit`
 Caveats
 -----------
diff --git a/grab.py b/grab.py
deleted file mode 100755
index 88b5b62..0000000
--- a/grab.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python2
-
-from __future__ import with_statement
-import sys
-try: import json
-except ImportError: import simplejson as json
-from urllib2 import urlopen, HTTPError
-from time import sleep
-
-with open('user.json', 'r') as f:
-    user = json.load(f)['user']
-
-sub_section = 'comments'
-after = ''
-
-init_url = 'http://www.reddit.com/user/{user}/comments/.json?after=%s'.format(user=user)
-next_url = init_url % after
-
-http = urlopen(next_url)
-reddit = json.load(http)
-
-datum = []
-while True:
-    after = reddit['data']['after']
-    children = reddit['data']['children']
-
-    # This bit fills datum with the id (for removal) and the date (for saving recent posts)
-    for child in children:
-        child_data = child['data']
-        if 'id' in child_data:
-            datum.append({
-                'id': child_data[u'name'],
-                'created': child_data['created'],
-                'body': child_data['body'],
-                'subreddit': child_data['subreddit']})
-
-    if after == None:
-        break
-
-    next_url = init_url % after
-    http = urlopen(next_url)
-    reddit = json.load(http)
-    sleep(1)
-
-with open('data.json', 'w') as f:
-    json.dump(datum, f)
diff --git a/kill.py b/kill.py
deleted file mode 100755
index ee6e9bc..0000000
--- a/kill.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python2
-
-from __future__ import with_statement
-try: import json
-except ImportError: import simplejson as json
-import sys, httplib, urllib
-from datetime import datetime, timedelta
-from time import sleep
-
-## Get the data we need to log into the API
-with open('user.json', 'r') as f:
-    data = json.load(f)
-
-days = data['days']
-user = data['user']
-passwd = data['passwd']
-
-## Load our json which should be all the user's history
-with open('data.json', 'r') as f:
-    data = json.load(f)
-
-# Every thing before this time will be deleted
-before_time = datetime.now() - timedelta(days=days)
-
-## Fill an array of IDs that are to be deleted
-deletion_ids = [item for item in data if datetime.fromtimestamp(item['created']) < before_time]
-
-if len(deletion_ids) == 0:
-    print "Couldn't find any posts to delete"
-    exit(0)
-
-## This part logs you in.
-headers = {
-    "Content-type": "application/x-www-form-urlencoded",
-    "User-Agent": "Shreddit"
-    }
-conn = httplib.HTTPSConnection('ssl.reddit.com')
-params = urllib.urlencode({
-    'user': user,
-    'passwd': passwd,
-    'api_type': 'json'})
-
-conn.request("POST", "/api/login/%s" % user, params, headers)
-http = conn.getresponse()
-tmp = json.loads(http.read())['json']['data']
-headers.update({'Cookie': 'reddit_session=%s' % tmp['cookie']})
-modhash = tmp['modhash']
-
-for dat in deletion_ids:
-    rid = dat['id']
-    time = datetime.fromtimestamp(dat['created']).date()
-    subreddit = dat['subreddit']
-    text = dat[u'body'][:20]
-
-    #print '{rid}: {time} {subreddit}: "{text}..."'.format(subreddit=subreddit, rid=rid, time=time, text=text)
-    # And now for the deleting
-    conn = httplib.HTTPConnection('www.reddit.com')
-    params = urllib.urlencode({
-        'id': rid,
-        'uh': modhash,
-        'api_type': 'json'})
-    #headers.update({"Content-Length": len(params)})
-    conn.request('POST', '/api/del', params, headers)
-    http = conn.getresponse()
-    if http.read() != '{}':
-        print '''Failed to delete "%s" (%s - %s - %s)''' % (text, rid, time, subreddit)
-    sleep(2)
diff --git a/user.json b/user.json
deleted file mode 100644
index d4f665d..0000000
--- a/user.json
+++ /dev/null
@@ -1 +0,0 @@
-{"passwd": "", "user": "", "days": 7}
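
The deleted grab.py/kill.py pair crawled the user's comment history into data.json and then replayed it against /api/del by hand; per the README change, that crawl-then-delete flow now belongs to the reddit_api library. For orientation, a `schreddit` entry point built on that library might look roughly like the sketch below. The names used (`reddit.Reddit`, `login`, `r.user.get_comments`, `Comment.delete`) are assumptions borrowed from later PRAW releases (reddit_api's successor) and have not been verified against the 2011 library.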
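
    #!/usr/bin/env python2
    # Hypothetical sketch only: the method names below are assumed from
    # later PRAW releases and may not match mellort's 2011 reddit_api.
    import reddit  # https://github.com/mellort/reddit_api

    r = reddit.Reddit(user_agent='Shreddit')
    r.login('username', 'password')  # placeholder credentials

    # Iterate over the logged-in user's comments and delete each one; the
    # library handles the pagination, cookie, and modhash bookkeeping that
    # grab.py and kill.py previously did by hand.
    for comment in r.user.get_comments(limit=None):
        comment.delete()

Note that the `days` cutoff from user.json has no direct equivalent in this sketch; it would have to be reimplemented by comparing each comment's created timestamp against a datetime threshold before deleting, as kill.py did.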