Datasets:
Sub-tasks:
multi-class-classification
Languages:
English
Size:
1K<n<10K
Tags:
natural-language-understanding
ideology-classification
text-classification
natural-language-processing
License:
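
The data was collected with the script below, which pages through Reddit's public JSON listings for each subreddit: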
```python
"""Collect many posts from the desired subreddit(s) via Reddit's public JSON endpoint."""

import random
import time

import requests

subreddit_list = [
    "theredpillrebooted",
    "RedPillWomen",
    "Feminism",
    "marriedredpill",
    "TheBluePill",
    "PurplePillDebate",
    "RedPillWives",
]

# Each listing page is fetched as JSON; the {} slots take the subreddit name
# and an optional "&after=<id>" pagination cursor.
url_template = "https://www.reddit.com/r/{}/.json?t=all{}"

# Reddit rejects requests without a User-Agent, so identify the bot explicitly.
headers = {"User-Agent": "Testing Bot Gundam Wing"}

post_list = []

for subreddit in subreddit_list:
    counter = 10  # fetch at most 10 listing pages per subreddit
    params = ""   # empty on the first request; carries the "after" cursor later
    while counter > 0:
        print(f"Getting posts with params: {params}")
        url = url_template.format(subreddit, params)
        response = requests.get(url, headers=headers)
        if not response.ok:
            print(f"Error: {response.status_code}")
            break  # give up on this subreddit rather than retrying forever

        data = response.json()
        posts = data["data"]["children"]
        print(f"Got {len(posts)} posts")
        for post in posts:
            pdata = post["data"]
            post_id = pdata["id"]
            title = pdata["title"]
            text = pdata.get("selftext")
            score = pdata["score"]
            author = pdata["author"]
            date = pdata["created_utc"]
            link = pdata.get("url_overridden_by_dest")
            print(f"{post_id}: {title} - {link}")
            post_list.append(
                [subreddit, post_id, title, text, link, score, author, date, pdata]
            )

        # The "after" field is the cursor for the next page; it is None once
        # the listing is exhausted.
        after = data["data"]["after"]
        if after is None:
            print("No more posts, broke on", subreddit, "with counter at", counter)
            break
        params = "&after=" + after

        counter -= 1
        # Randomized delay between requests to stay under Reddit's rate limits.
        time.sleep(random.randint(1, 45))
```