Attempted to fix seent; testing later.

This commit is contained in:
Luke Ogburn
2022-04-13 11:21:56 -04:00
parent 6b75e744da
commit 827beb8079
5 changed files with 14 additions and 13 deletions

View File

@@ -20,8 +20,7 @@ you can run `pip3 install Mastodon.py praw` to install both of these.
- [x] Separate methods to make code cleaner - [x] Separate methods to make code cleaner
**Likely** **Likely**
- [ ] Keep track of what has been scraped and tooted to not duplicate posts - [?] Keep track of what has been scraped and tooted to not duplicate posts
- This is a pain in the butt for some reason
- [ ] Debugging logging - [ ] Debugging logging
- [ ] Move all vars into config - [ ] Move all vars into config
- [ ] Docker image - [ ] Docker image

13
bot.py
View File

@@ -3,8 +3,7 @@ import logging
# Mastodon bot to post things # Mastodon bot to post things
class bot(): class bot():
def __init__(self, config, debug=False): def __init__(self, config, neuter=False):
self.debug = debug
self.masto = Mastodon(access_token=config["mastodon"]["access_token"], api_base_url=config["mastodon"]["host"]) self.masto = Mastodon(access_token=config["mastodon"]["access_token"], api_base_url=config["mastodon"]["host"])
# uploads media to mastodon, returns the mastodon ID # uploads media to mastodon, returns the mastodon ID
@@ -17,9 +16,15 @@ class bot():
def upload_all_media(self, filenames): def upload_all_media(self, filenames):
ids = [] ids = []
for fn in filenames: for fn in filenames:
ids.append(self.upload_media(fn)) if not self.neuter:
ids.append(self.upload_media(fn))
else:
print(f"Would have uploaded {fn}")
return ids return ids
def toot(self, text, media=None): def toot(self, text, media=None):
logging.info(f"Posting:\n Text: {text}") logging.info(f"Posting:\n Text: {text}")
self.masto.status_post(text, media_ids=media) if not self.neuter:
self.masto.status_post(text, media_ids=media)
else:
print(f"Would have tooted: {text}")

View File

@@ -13,7 +13,6 @@ class helper():
# service to pass itself in every time # service to pass itself in every time
service = service.service service = service.service
low_activity_random = service.low_activity_random low_activity_random = service.low_activity_random
debug = service.debug
places = service.places places = service.places
seent = service.seent seent = service.seent

View File

@@ -18,18 +18,16 @@ class reddit_scraper:
try: self.seent = savefile["reddit"] try: self.seent = savefile["reddit"]
except: self.seent = {} except: self.seent = {}
### REDDIT METHODS
# gets posts from a given subreddit # gets posts from a given subreddit
def scrape(self, sub, limit): def scrape(self, sub, limit):
# make sure self.seent has the sub, add if not # make sure self.seent has the sub, add if not
if sub not in self.seent: self.seent[sub] = time.time() if sub not in self.seent: self.seent[sub] = time.time()
# get posts that aren't in seent list # get posts that aren't seent
post_list = [] post_list = []
posts = self.login.subreddit(sub).new(limit=limit) posts = self.login.subreddit(sub).new(limit=limit)
posts = helper.reddit_listify(posts) posts = helper.reddit_listify(posts)
for p in posts[::-1]: for p in posts[::-1]:
if helper.ts_older(p.created, self.seent[sub]): if helper.ts_older(self.seent[sub], p.created):
break break
logging.info(f"Scraping post {p.id}") logging.info(f"Scraping post {p.id}")
post_list.append(p) post_list.append(p)

View File

@@ -35,7 +35,7 @@ class scraper:
# downloads a given post's media and return the locations # downloads a given post's media and return the locations
def download(self, post): def download(self, post):
logging.info(f"Downloading {post.id}... ") logging.info(f"Downloading {post.id}... ")
result = self.login.download(post) result = [] #self.login.download(post) neuter
logging.info(f"Done downloading {post.id}.") logging.info(f"Done downloading {post.id}.")
return result return result