bug fixes

nanos 2023-04-03 08:57:31 +01:00
parent 2b707e7807
commit 9f28ba2333
2 changed files with 9 additions and 8 deletions

View file

@@ -109,7 +109,8 @@ Please find the list of all configuration options, including descriptions, below
 | `MAX_FOLLOWERS` | `--max-followers` | No | Provide to backfill profiles for your most recent followers. Determines how many of your last followers you want to backfill. Recommended value: `80`.
 | `MAX_FOLLOW_REQUESTS` | `--max-follow-requests` | No | Provide to backfill profiles for the API key owner's most recent pending follow requests. Determines how many of your last follow requests you want to backfill. Recommended value: `80`.
 | `FROM_NOTIFICATIONS` | `--from-notifications` | No | Provide to backfill profiles of anyone mentioned in your recent notifications. Determines how many hours of notifications you want to look at. Requires an access token with `read:notifications` scope. Recommended value: `1`, unless you run FediFetcher less than once per hour.
-|`BACKFILL_WITH_CONTEXT` | `--backfill-with-context` | No |
+|`BACKFILL_WITH_CONTEXT` | `--backfill-with-context` | No | Set to `0` to disable fetching remote replies while backfilling profiles. This is enabled by default, but you can disable it, if it's too slow for you.
+|`BACKFILL_MENTIONED_USERS` | `--backfill-mentioned-users` | No | Set to `0` to disable backfilling any mentioned users when fetching the home timeline. This is enabled by default, but you can disable it, if it's too slow for you.
 | `REMEMBER_USERS_FOR_HOURS` | `--remember-users-for-hours` | No | How long between back-filling attempts for non-followed accounts? Defaults to `168`, i.e. one week.
 | `HTTP_TIMEOUT` | `--http-timeout` | No | The timeout for any HTTP requests to the Mastodon API in seconds. Defaults to `5`.
 | -- | `--lock-hours` | No | Determines after how many hours a lock file should be discarded. Not relevant when running the script as GitHub Action, as concurrency is prevented using a different mechanism. Recommended value: `24`.
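
The two new rows describe options that are enabled by default and switched off by passing `0`. As an illustration of that convention only, the flags can be modelled with `argparse` as below; the flag names come from the table above, but the defaults and wiring are an assumption, not FediFetcher's actual argument handling.

```python
import argparse

# Illustrative sketch only: flag names mirror the table above; defaults
# and help texts are assumptions, not FediFetcher's real parser.
parser = argparse.ArgumentParser(description="backfill options (sketch)")
parser.add_argument("--max-followers", type=int, default=0,
                    help="How many recent followers to backfill, e.g. 80.")
parser.add_argument("--backfill-with-context", type=int, default=1,
                    help="Set to 0 to disable fetching remote replies while backfilling.")
parser.add_argument("--backfill-mentioned-users", type=int, default=1,
                    help="Set to 0 to disable backfilling mentioned users.")

args = parser.parse_args(["--max-followers", "80", "--backfill-with-context", "0"])
print(args.max_followers, args.backfill_with_context, args.backfill_mentioned_users)
# 80 0 1
```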

View file

@@ -822,12 +822,12 @@ if __name__ == "__main__":
     all_known_users = OrderedSet(list(known_followings) + list(recently_checked_users))
-    if arguments.reply_interval_hours > 0:
+    if arguments.reply_interval_in_hours > 0:
         """pull the context toots of toots user replied to, from their
         original server, and add them to the local server."""
-        user_ids = get_active_user_ids(arguments.server, arguments.access_token, arguments.reply_interval_hours)
+        user_ids = get_active_user_ids(arguments.server, arguments.access_token, arguments.reply_interval_in_hours)
         reply_toots = get_all_reply_toots(
-            arguments.server, user_ids, arguments.access_token, seen_urls, arguments.reply_interval_hours
+            arguments.server, user_ids, arguments.access_token, seen_urls, arguments.reply_interval_in_hours
         )
         known_context_urls = get_all_known_context_urls(arguments.server, reply_toots,parsed_urls)
         seen_urls.update(known_context_urls)
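
The changed lines in this hunk all correct the same mistake: `argparse` derives the attribute name on the parsed namespace from the option string, so a flag spelled `--reply-interval-in-hours` is read back as `arguments.reply_interval_in_hours`, and the shorter `reply_interval_hours` spelling fails at runtime. A minimal, self-contained demonstration, assuming the flags are defined with the same spelling as the attributes used here (everything else is illustrative):

```python
import argparse

parser = argparse.ArgumentParser()
# argparse turns dashes in the option string into underscores for the dest.
parser.add_argument("--reply-interval-in-hours", type=int, default=0)
parser.add_argument("--home-timeline-length", type=int, default=0)

arguments = parser.parse_args(["--reply-interval-in-hours", "2"])

print(arguments.reply_interval_in_hours)  # 2, the attribute that actually exists
try:
    arguments.reply_interval_hours        # the pre-fix spelling from the left side
except AttributeError as err:
    print("AttributeError:", err)         # the crash this commit fixes
```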
@@ -838,9 +838,9 @@ if __name__ == "__main__":
         add_context_urls(arguments.server, arguments.access_token, context_urls, seen_urls)
-    if arguments.max_home_timeline_length > 0:
+    if arguments.home_timeline_length > 0:
         """Do the same with any toots on the key owner's home timeline """
-        timeline_toots = get_timeline(arguments.server, arguments.access_token, arguments.max_home_timeline_length)
+        timeline_toots = get_timeline(arguments.server, arguments.access_token, arguments.home_timeline_length)
         known_context_urls = get_all_known_context_urls(arguments.server, timeline_toots,parsed_urls)
         add_context_urls(arguments.server, arguments.access_token, known_context_urls, seen_urls)
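
The unchanged helper calls in this hunk (`get_all_known_context_urls`, `add_context_urls`) revolve around fetching a toot's conversation from its origin server. For readers unfamiliar with the underlying API: Mastodon exposes a status's ancestors and descendants via `GET /api/v1/statuses/:id/context`. The sketch below shows only that raw call; the function name and values are placeholders, and FediFetcher's own helpers do considerably more (URL parsing, deduplication, error handling).

```python
import requests

def fetch_context_urls(server: str, status_id: str, timeout: int = 5) -> list[str]:
    """Sketch: list the URLs of the ancestors/descendants of one status.

    /api/v1/statuses/:id/context is the standard Mastodon endpoint; the
    server and status_id values here are placeholders for illustration.
    """
    resp = requests.get(
        f"https://{server}/api/v1/statuses/{status_id}/context",
        timeout=timeout,
    )
    resp.raise_for_status()
    context = resp.json()
    return [toot["url"] for toot in context["ancestors"] + context["descendants"]
            if toot.get("url")]

# e.g. fetch_context_urls("mastodon.social", "109999999999999999")
```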
@@ -867,13 +867,13 @@ if __name__ == "__main__":
     if arguments.max_followings > 0:
         log(f"Getting posts from last {arguments.max_followings} followings")
-        user_id = get_user_id(arguments.server, arguments.backfill_followings_for_user, arguments.access_token)
+        user_id = get_user_id(arguments.server, arguments.user, arguments.access_token)
         followings = get_new_followings(arguments.server, user_id, arguments.max_followings, all_known_users)
         add_user_posts(arguments.server, arguments.access_token, followings, known_followings, all_known_users, seen_urls)
     if arguments.max_followers > 0:
         log(f"Getting posts from last {arguments.max_followers} followers")
-        user_id = get_user_id(arguments.server, arguments.backfill_followings_for_user, arguments.access_token)
+        user_id = get_user_id(arguments.server, arguments.user, arguments.access_token)
         followers = get_new_followers(arguments.server, user_id, arguments.max_followers, all_known_users)
         add_user_posts(arguments.server, arguments.access_token, followers, recently_checked_users, all_known_users, seen_urls)
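
The two corrected calls in this hunk fix references to a renamed option: `arguments.user` replaces `arguments.backfill_followings_for_user`. Mismatches like this only surface when the affected branch actually runs, so one cheap safeguard, sketched below, is to verify up front that the parsed namespace carries every attribute the main block dereferences. The attribute names are taken from the code in this diff; the check itself is an assumption, not part of FediFetcher.

```python
import argparse

# Sketch of a fail-fast guard, not FediFetcher code: abort immediately if the
# parsed namespace lacks any attribute the main block will dereference later.
REQUIRED_ATTRS = (
    "server", "access_token", "user",
    "reply_interval_in_hours", "home_timeline_length",
    "max_followings", "max_followers",
)

def check_arguments(arguments: argparse.Namespace) -> None:
    missing = [name for name in REQUIRED_ATTRS if not hasattr(arguments, name)]
    if missing:
        raise SystemExit(f"missing parsed arguments: {', '.join(missing)}")

# Demo with a hand-built namespace standing in for parser.parse_args():
check_arguments(argparse.Namespace(
    server="example.social", access_token="TOKEN", user="alice",
    reply_interval_in_hours=0, home_timeline_length=200,
    max_followings=80, max_followers=80,
))
```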