hopefully neuter works

This commit is contained in:
Luke Ogburn
2022-04-13 22:56:16 -04:00
parent f2e73bbda2
commit c36e3d75ff
4 changed files with 19 additions and 8 deletions

View File

@@ -21,6 +21,7 @@ you can run `pip3 install Mastodon.py praw` to install both of these.
**Likely**
- [ ] Keep track of what has been scraped and tooted to not duplicate posts
- [ ] Actually add neuter settings for development
- [ ] Debugging logging
- [ ] Move all vars into config
- [ ] Docker image

View File

@@ -18,8 +18,8 @@ def main():
# get config
config = json.load(open('config.json', 'r'))
# make bots
masto = bot(config)
reddit = scraper("reddit", config, low_activity_random=True)
masto = bot(config, neuter=True)
reddit = scraper("reddit", config, neuter=True)
# run bots
run(masto, reddit)
# buffer time bc posts only happen so often so why check

bot.py — 15 changes
View File

@@ -3,14 +3,20 @@ import logging
# Mastodon bot to post things
class bot():
def __init__(self, config, neuter=False):
    """Create the Mastodon bot.

    config: parsed config.json dict; reads config["mastodon"]["access_token"]
        and config["mastodon"]["host"].
    neuter: when True, posting/uploading methods only print what they
        *would* have done instead of calling the Mastodon API
        (development/dry-run mode).
    """
    self.neuter = neuter
    # NOTE(review): the Mastodon client is constructed even when neutered,
    # so a valid access token is still required — confirm that is intended.
    self.masto = Mastodon(access_token=config["mastodon"]["access_token"],
                          api_base_url=config["mastodon"]["host"])
# uploads media to mastodon, returns the mastodon ID
# specify mimetype of video files as "video/mp4" to avoid error
def upload_media(self, filename, mimetype=None):
    """Upload a single media file and return Mastodon's media result.

    filename: path of the file to upload.
    mimetype: optional MIME type hint (e.g. "video/mp4" for video files,
        which otherwise trigger an upload error).

    In neuter (dry-run) mode nothing is uploaded; True is returned as a
    truthy placeholder so callers that only truthiness-check still work.
    """
    logging.info(f"Uploading media {filename}")
    if not self.neuter:
        returnval = self.masto.media_post(filename, mime_type=mimetype)
    else:
        print(f"Would have uploaded {filename}")
        returnval = True
    return returnval
# uploads all given media
def upload_all_media(self, filenames):
@@ -21,4 +27,7 @@ class bot():
def toot(self, text, media=None):
    """Post a status to Mastodon.

    text: status text to post.
    media: optional media IDs (as returned by upload_media) to attach.

    In neuter (dry-run) mode the status is printed instead of posted.
    """
    logging.info(f"Posting:\n Text: {text}")
    if not self.neuter:
        self.masto.status_post(text, media_ids=media)
    else:
        print(f"Would have posted {text}")

View File

@@ -4,7 +4,7 @@ import json
from reddit import reddit_scraper as reddit
class scraper:
def __init__(self, service, config, low_activity_random=False):
def __init__(self, service, config, neuter=False):
# error checking
scrapers = ["reddit"]
if service.lower() not in scrapers:
@@ -16,8 +16,8 @@ class scraper:
f = open("savefile.json", "w+")
f.write("{}")
# set object variables
self.low_activity_random = low_activity_random
self.service = service
self.neuter = neuter
# login to service
if service == "reddit": self.login = reddit(config)
@@ -35,7 +35,8 @@ class scraper:
# downloads a given post's media and return the locations
def download(self, post):
    """Download a post's media via the service-specific scraper.

    post: scraped post object (must expose .id for logging).

    Returns whatever the underlying scraper's download() returns
    (presumably the downloaded file locations — confirm against the
    reddit scraper), or None in neuter (dry-run) mode.
    """
    logging.info(f"Downloading {post.id}... ")
    # BUG FIX: `result` was never assigned, so `return result` raised
    # NameError on every call; capture the scraper's return value.
    if not self.neuter:
        result = self.login.download(post)
    else:
        print(f"Neuter: would have downloaded {post} content")
        result = None
    logging.info(f"Done downloading {post.id}.")
    return result