Attempted to fix seent; testing later

This commit is contained in:
Luke Ogburn
2022-04-13 11:21:56 -04:00
parent 6b75e744da
commit 827beb8079
5 changed files with 14 additions and 13 deletions

View File

@@ -20,8 +20,7 @@ you can run `pip3 install Mastodon.py praw` to install both of these.
- [x] Separate methods to make code cleaner
**Likely**
- [ ] Keep track of what has been scraped and tooted to not duplicate posts
- This is a pain in the butt for some reason
- [?] Keep track of what has been scraped and tooted to not duplicate posts
- [ ] Debugging logging
- [ ] Move all vars into config
- [ ] Docker image

13
bot.py
View File

@@ -3,8 +3,7 @@ import logging
# Mastodon bot to post things
class bot():
def __init__(self, config, debug=False):
self.debug = debug
def __init__(self, config, neuter=False):
self.masto = Mastodon(access_token=config["mastodon"]["access_token"], api_base_url=config["mastodon"]["host"])
# uploads media to mastodon, returns the mastodon ID
@@ -17,9 +16,15 @@ class bot():
def upload_all_media(self, filenames):
ids = []
for fn in filenames:
ids.append(self.upload_media(fn))
if not self.neuter:
ids.append(self.upload_media(fn))
else:
print(f"Would have uploaded {fn}")
return ids
def toot(self, text, media=None):
logging.info(f"Posting:\n Text: {text}")
self.masto.status_post(text, media_ids=media)
if not self.neuter:
self.masto.status_post(text, media_ids=media)
else:
print(f"Would have tooted: {text}")

View File

@@ -13,7 +13,6 @@ class helper():
# service to pass itself in every time
service = service.service
low_activity_random = service.low_activity_random
debug = service.debug
places = service.places
seent = service.seent

View File

@@ -17,19 +17,17 @@ class reddit_scraper:
savefile = json.load(savefile)
try: self.seent = savefile["reddit"]
except: self.seent = {}
### REDDIT METHODS
# gets posts from a given subreddit
def scrape(self, sub, limit):
# make sure self.seent has the sub, add if not
if sub not in self.seent: self.seent[sub] = time.time()
# get posts that aren't in seent list
# get posts that aren't seent
post_list = []
posts = self.login.subreddit(sub).new(limit=limit)
posts = helper.reddit_listify(posts)
for p in posts[::-1]:
if helper.ts_older(p.created, self.seent[sub]):
if helper.ts_older(self.seent[sub], p.created):
break
logging.info(f"Scraping post {p.id}")
post_list.append(p)

View File

@@ -35,7 +35,7 @@ class scraper:
# downloads a given post's media and return the locations
def download(self, post):
logging.info(f"Downloading {post.id}... ")
result = self.login.download(post)
result = [] #self.login.download(post) neuter
logging.info(f"Done downloading {post.id}.")
return result