hopefully neuter works
@@ -21,6 +21,7 @@ you can run `pip3 install Mastodon.py praw` to install both of these.
 
 **Likely**
 - [ ] Keep track of what has been scraped and tooted to not duplicate posts
+- [ ] Actually add neuter settings for development
 - [ ] Debugging logging
 - [ ] Move all vars into config
 - [ ] Docker image
@@ -18,8 +18,8 @@ def main():
     # get config
     config = json.load(open('config.json', 'r'))
     # make bots
-    masto = bot(config)
-    reddit = scraper("reddit", config, low_activity_random=True)
+    masto = bot(config, neuter=True)
+    reddit = scraper("reddit", config, neuter=True)
     # run bots
     run(masto, reddit)
     # buffer time bc posts only happen so often so why check
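
The README hunk above still lists "Move all vars into config" as open, so the hard-coded neuter=True here is presumably temporary. A minimal sketch of how the flag could later come from config.json instead; the "neuter" key is hypothetical and not part of this commit:

    # Hypothetical follow-up: drive the dry-run flag from config.json rather than
    # hard-coding it. The "neuter" key does not exist in this commit's config.
    neuter = config.get("neuter", False)
    masto = bot(config, neuter=neuter)
    reddit = scraper("reddit", config, neuter=neuter)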
bot.py
@@ -3,14 +3,20 @@ import logging
 
 # Mastodon bot to post things
 class bot():
-    def __init__(self, config):
+    def __init__(self, config, neuter=False):
+        self.neuter = neuter
         self.masto = Mastodon(access_token=config["mastodon"]["access_token"], api_base_url=config["mastodon"]["host"])
 
     # uploads media to mastodon, returns the mastodon ID
    # specify mimetype of video files as "video/mp4" to avoid error
     def upload_media(self, filename, mimetype=None):
         logging.info(f"Uploading media {filename}")
-        return self.masto.media_post(filename, mime_type=mimetype)
+        if not self.neuter:
+            returnval = self.masto.media_post(filename, mime_type=mimetype)
+        else:
+            print(f"Would have uploaded {filename}")
+            returnval = True
+        return returnval
 
     # uploads all given media
     def upload_all_media(self, filenames):
@@ -21,4 +27,7 @@ class bot():
 
     def toot(self, text, media=None):
         logging.info(f"Posting:\n Text: {text}")
-        self.masto.status_post(text, media_ids=media)
+        if not self.neuter:
+            self.masto.status_post(text, media_ids=media)
+        else:
+            print(f"Would have posted {text}")
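
To see the new flag in action, here is a minimal dry-run sketch of the bot class above. It assumes a config.json with working Mastodon credentials (the constructor still builds a real Mastodon client even when neutered) and a local file named example.jpg; both are assumptions, not part of the commit:

    import json
    from bot import bot

    config = json.load(open('config.json', 'r'))
    b = bot(config, neuter=True)              # real Mastodon client, but posting is disabled
    media_id = b.upload_media("example.jpg")  # prints "Would have uploaded example.jpg", returns True
    b.toot("hello from a dry run", media=[media_id])  # prints "Would have posted hello from a dry run"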
@@ -4,7 +4,7 @@ import json
 from reddit import reddit_scraper as reddit
 
 class scraper:
-    def __init__(self, service, config, low_activity_random=False):
+    def __init__(self, service, config, neuter=False):
         # error checking
         scrapers = ["reddit"]
         if service.lower() not in scrapers:
@@ -16,8 +16,8 @@ class scraper:
             f = open("savefile.json", "w+")
             f.write("{}")
         # set object variables
-        self.low_activity_random = low_activity_random
         self.service = service
+        self.neuter = neuter
         # login to service
         if service == "reddit": self.login = reddit(config)
 
@@ -35,7 +35,8 @@ class scraper:
     # downloads a given post's media and return the locations
     def download(self, post):
         logging.info(f"Downloading {post.id}... ")
-        self.login.download(post)
+        if not self.neuter: result = self.login.download(post)
+        else: print(f"Neuter: would have downloaded {post} content"); result = None
         logging.info(f"Done downloading {post.id}.")
         return result
 
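
And a matching sketch for the scraper side. The filename of the module that defines class scraper is not shown in this commit, so the import path below is a placeholder assumption, as is the idea of calling download() by hand:

    import json
    # "scrape" is a placeholder module name; the real filename does not appear in the diff.
    from scrape import scraper

    config = json.load(open('config.json', 'r'))
    r = scraper("reddit", config, neuter=True)
    # With neuter=True, download() only logs and prints instead of fetching anything:
    # r.download(some_post)  ->  "Neuter: would have downloaded <some_post> content"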