From 96dba41c9b860da838624a6a44864814d690f609 Mon Sep 17 00:00:00 2001 From: Ryan Barrett Date: Fri, 11 Feb 2022 22:38:56 -0800 Subject: [PATCH] logging: use separate loggers for each module with their names --- activitypub.py | 18 ++++++++++-------- common.py | 20 +++++++++++--------- models.py | 8 +++++--- redirect.py | 12 +++++++----- salmon.py | 10 ++++++---- superfeedr.py | 4 +++- tests/test_common.py | 2 -- webfinger.py | 14 ++++++++------ webmention.py | 26 ++++++++++++++------------ 9 files changed, 64 insertions(+), 50 deletions(-) diff --git a/activitypub.py b/activitypub.py index fa2cf79..ed62d74 100644 --- a/activitypub.py +++ b/activitypub.py @@ -20,6 +20,8 @@ from common import redirect_unwrap, redirect_wrap from models import Follower, MagicKey from httpsig.requests_auth import HTTPSignatureAuth +logger = logging.getLogger(__name__) + CACHE_TIME = datetime.timedelta(seconds=15) SUPPORTED_TYPES = ( @@ -49,7 +51,7 @@ def send(activity, inbox_url, user_domain): Returns: requests.Response """ - logging.info('Sending AP request from {user_domain}: {json_dumps(activity, indent=2)}') + logger.info(f'Sending AP request from {user_domain}: {json_dumps(activity, indent=2)}') # prepare HTTP Signature (required by Mastodon) # https://w3c.github.io/activitypub/#authorization @@ -88,7 +90,7 @@ def actor(domain): mf2 = util.fetch_mf2(f'http://{domain}/', gateway=True, headers=common.HEADERS) hcard = mf2util.representative_hcard(mf2, mf2['url']) - logging.info(f'Representative h-card: {json_dumps(hcard, indent=2)}') + logger.info(f'Representative h-card: {json_dumps(hcard, indent=2)}') if not hcard: error(f"Couldn't find a representative h-card (http://microformats.org/wiki/representative-hcard-parsing) on {mf2['url']}") @@ -102,7 +104,7 @@ def actor(domain): 'following': f'{request.host_url}{domain}/following', 'followers': f'{request.host_url}{domain}/followers', }) - logging.info(f'Returning: {json_dumps(obj, indent=2)}') + logger.info(f'Returning: 
{json_dumps(obj, indent=2)}') return (obj, { 'Content-Type': common.CONTENT_TYPE_AS2, @@ -114,7 +116,7 @@ def actor(domain): def inbox(domain): """Accepts POSTs to /[DOMAIN]/inbox and converts to outbound webmentions.""" body = request.get_data(as_text=True) - logging.info(f'Got: {body}') + logger.info(f'Got: {body}') # parse and validate AS2 activity try: @@ -186,7 +188,7 @@ def accept_follow(follow, follow_unwrapped): follow: dict, AP Follow activity follow_unwrapped: dict, same, except with redirect URLs unwrapped """ - logging.info('Replying to Follow with Accept') + logger.info('Replying to Follow with Accept') followee = follow.get('object') followee_unwrapped = follow_unwrapped.get('object') @@ -232,7 +234,7 @@ def undo_follow(undo_unwrapped): Args: undo_unwrapped: dict, AP Undo activity with redirect URLs unwrapped """ - logging.info('Undoing Follow') + logger.info('Undoing Follow') follow = undo_unwrapped.get('object', {}) follower = follow.get('actor') @@ -244,10 +246,10 @@ def undo_follow(undo_unwrapped): user_domain = util.domain_from_link(followee) follower_obj = Follower.get_by_id(Follower._id(user_domain, follower)) if follower_obj: - logging.info(f'Marking {follower_obj.key} as inactive') + logger.info(f'Marking {follower_obj.key} as inactive') follower_obj.status = 'inactive' follower_obj.put() else: - logging.warning(f'No Follower found for {user_domain} {follower}') + logger.warning(f'No Follower found for {user_domain} {follower}') # TODO send webmention with 410 of u-follow diff --git a/common.py b/common.py index 308fa0e..75326ff 100644 --- a/common.py +++ b/common.py @@ -16,6 +16,8 @@ from werkzeug.exceptions import BadGateway from models import Response +logger = logging.getLogger(__name__) + DOMAIN_RE = r'([^/:]+\.[^/:]+)' ACCT_RE = r'(?:acct:)?([^@]+)@' + DOMAIN_RE TLD_BLOCKLIST = ('7z', 'asp', 'aspx', 'gif', 'html', 'ico', 'jpg', 'jpeg', 'js', @@ -78,18 +80,18 @@ def _requests_fn(fn, url, parse_json=False, **kwargs): 
kwargs.setdefault('headers', {}).update(HEADERS) resp = fn(url, gateway=True, **kwargs) - logging.info(f'Got {resp.status_code} headers: {resp.headers}') + logger.info(f'Got {resp.status_code} headers: {resp.headers}') type = content_type(resp) if (type and type != 'text/html' and (type.startswith('text/') or type.endswith('+json') or type.endswith('/json'))): - logging.info(resp.text) + logger.info(resp.text) if parse_json: try: return resp.json() except ValueError: msg = "Couldn't parse response as JSON" - logging.info(msg, exc_info=True) + logger.info(msg, exc_info=True) raise BadGateway(msg) return resp @@ -116,7 +118,7 @@ def get_as2(url): """ def _error(resp): msg = "Couldn't fetch %s as ActivityStreams 2" % url - logging.warning(msg) + logger.warning(msg) err = BadGateway(msg) err.requests_response = resp raise err @@ -192,7 +194,7 @@ def send_webmentions(activity_wrapped, proxy=None, **response_props): errors = [] # stores (code, body) tuples for target in targets: if util.domain_from_link(target) == util.domain_from_link(source): - logging.info(f'Skipping same-domain webmention from {source} to {target}') + logger.info(f'Skipping same-domain webmention from {source} to {target}') continue response = Response(source=source, target=target, direction='in', @@ -201,17 +203,17 @@ def send_webmentions(activity_wrapped, proxy=None, **response_props): wm_source = (response.proxy_url() if verb in ('follow', 'like', 'share') or proxy else source) - logging.info(f'Sending webmention from {wm_source} to {target}') + logger.info(f'Sending webmention from {wm_source} to {target}') try: endpoint = webmention.discover(target, headers=HEADERS).endpoint if endpoint: webmention.send(endpoint, wm_source, target, headers=HEADERS) response.status = 'complete' - logging.info('Success!') + logger.info('Success!') else: response.status = 'ignored' - logging.info('Ignoring.') + logger.info('Ignoring.') except BaseException as e: errors.append(util.interpret_http_exception(e)) 
response.put() @@ -262,7 +264,7 @@ def postprocess_as2(activity, target=None, key=None): activity['inReplyTo'] = target_id elif isinstance(in_reply_to, list): if len(in_reply_to) > 1: - logging.warning( + logger.warning( "AS2 doesn't support multiple inReplyTo URLs! " 'Only using the first: %s' % in_reply_to[0]) activity['inReplyTo'] = in_reply_to[0] diff --git a/models.py b/models.py index d2a211b..3efa993 100644 --- a/models.py +++ b/models.py @@ -8,6 +8,8 @@ from flask import request from google.cloud import ndb from oauth_dropins.webutil.models import StringIdModel +logger = logging.getLogger(__name__) + class MagicKey(StringIdModel): """Stores a user's public/private key pair used for Magic Signatures. @@ -87,12 +89,12 @@ class Response(StringIdModel): if source and target: assert 'id' not in kwargs kwargs['id'] = self._id(source, target) - logging.info(f"Response id (source target): {kwargs['id']}") + logger.info(f"Response id (source target): {kwargs['id']}") super(Response, self).__init__(**kwargs) @classmethod def get_or_create(cls, source=None, target=None, **kwargs): - logging.info(f'Response source target: {source} {target}') + logger.info(f'Response source target: {source} {target}') return cls.get_or_insert(cls._id(source, target), **kwargs) def source(self): @@ -149,6 +151,6 @@ class Follower(StringIdModel): @classmethod def get_or_create(cls, user_domain, follower_id, **kwargs): - logging.info(f'new Follower for {user_domain} {follower_id}') + logger.info(f'new Follower for {user_domain} {follower_id}') return cls.get_or_insert(cls._id(user_domain, follower_id), **kwargs) diff --git a/redirect.py b/redirect.py index 01992c1..df1451f 100644 --- a/redirect.py +++ b/redirect.py @@ -25,6 +25,8 @@ from app import app, cache import common from models import MagicKey +logger = logging.getLogger(__name__) + CACHE_TIME = datetime.timedelta(seconds=15) @@ -49,10 +51,10 @@ def redir(to): urllib.parse.urlparse(to).hostname)) for domain in domains: if domain and 
MagicKey.get_by_id(domain): - logging.info(f'Found MagicKey for domain {domain}') + logger.info(f'Found MagicKey for domain {domain}') break else: - logging.info(f'No user found for any of {domains}; returning 404') + logger.info(f'No user found for any of {domains}; returning 404') abort(404) # poor man's conneg, only handle single Accept values, not multiple with @@ -62,7 +64,7 @@ def redir(to): return convert_to_as2(to) # redirect - logging.info(f'redirecting to {to}') + logger.info(f'redirecting to {to}') return redirect(to, code=301) @@ -74,10 +76,10 @@ def convert_to_as2(url): """ mf2 = util.fetch_mf2(url) entry = mf2util.find_first_entry(mf2, ['h-entry']) - logging.info(f"Parsed mf2 for {mf2['url']}: {json_dumps(entry, indent=2)}") + logger.info(f"Parsed mf2 for {mf2['url']}: {json_dumps(entry, indent=2)}") obj = common.postprocess_as2(as2.from_as1(microformats2.json_to_object(entry))) - logging.info(f'Returning: {json_dumps(obj, indent=2)}') + logger.info(f'Returning: {json_dumps(obj, indent=2)}') return obj, { 'Content-Type': common.CONTENT_TYPE_AS2, diff --git a/salmon.py b/salmon.py index 6893d95..b10a813 100644 --- a/salmon.py +++ b/salmon.py @@ -16,6 +16,8 @@ from oauth_dropins.webutil.flask_util import error from app import app import common +logger = logging.getLogger(__name__) + # from django_salmon.feeds ATOM_NS = 'http://www.w3.org/2005/Atom' ATOM_THREADING_NS = 'http://purl.org/syndication/thread/1.0' @@ -36,14 +38,14 @@ def slap(acct): """Accepts POSTs to /[ACCT]/salmon and converts to outbound webmentions.""" # TODO: unify with activitypub body = request.get_data(as_text=True) - logging.info(f'Got: {body}') + logger.info(f'Got: {body}') try: parsed = utils.parse_magic_envelope(body) except ParseError as e: error('Could not parse POST body as XML', exc_info=True) data = parsed['data'] - logging.info(f'Decoded: {data}') + logger.info(f'Decoded: {data}') # check that we support this activity type try: @@ -62,10 +64,10 @@ def slap(acct): elif not 
author.startswith('acct:'): error(f'Author URI {author} has unsupported scheme; expected acct:') - logging.info(f'Fetching Salmon key for {author}') + logger.info(f'Fetching Salmon key for {author}') if not magicsigs.verify(data, parsed['sig'], author_uri=author): error('Could not verify magic signature.') - logging.info('Verified magic signature.') + logger.info('Verified magic signature.') # Verify that the timestamp is recent. Required by spec. # I get that this helps prevent spam, but in practice it's a bit silly, diff --git a/superfeedr.py b/superfeedr.py index ef6678f..2424e0f 100644 --- a/superfeedr.py +++ b/superfeedr.py @@ -10,6 +10,8 @@ from flask import request from app import app +logger = logging.getLogger(__name__) + @app.route(r'/superfeedr/', methods=['GET', 'POST']) @app.route(r'/superfeedr/', methods=['GET', 'POST']) @@ -18,5 +20,5 @@ def superfeedr(_=None): https://documentation.superfeedr.com/publishers.html#subscription-callback """ - logging.info(f'Got:\n{request.get_data(as_text=True)}') + logger.info(f'Got:\n{request.get_data(as_text=True)}') return '', 204 diff --git a/tests/test_common.py b/tests/test_common.py index f770055..e86240d 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,7 +1,5 @@ # coding=utf-8 """Unit tests for common.py.""" -import logging -import os from unittest import mock from oauth_dropins.webutil import util diff --git a/webfinger.py b/webfinger.py index 858880f..feaae19 100644 --- a/webfinger.py +++ b/webfinger.py @@ -22,6 +22,8 @@ import models CACHE_TIME = datetime.timedelta(seconds=15) NON_TLDS = frozenset(('html', 'json', 'php', 'xml')) +logger = logging.getLogger(__name__) + # TODO # @cache.cached( @@ -34,7 +36,7 @@ class User(flask_util.XrdOrJrd): return 'webfinger_user' def template_vars(self, domain=None, url=None): - logging.debug(f'Headers: {list(request.headers.items())}') + logger.debug(f'Headers: {list(request.headers.items())}') if domain.split('.')[-1] in NON_TLDS: error(f"{domain} 
doesn't look like a domain", status=404) @@ -48,15 +50,15 @@ class User(flask_util.XrdOrJrd): resp = common.requests_get(candidate) parsed = util.parse_html(resp) mf2 = util.parse_mf2(parsed, url=resp.url) - # logging.debug(f'Parsed mf2 for {resp.url}: {json_dumps(mf2, indent=2)}') + # logger.debug(f'Parsed mf2 for {resp.url}: {json_dumps(mf2, indent=2)}') hcard = mf2util.representative_hcard(mf2, resp.url) if hcard: - logging.info(f'Representative h-card: {json_dumps(hcard, indent=2)}') + logger.info(f'Representative h-card: {json_dumps(hcard, indent=2)}') break else: error(f"didn't find a representative h-card (http://microformats.org/wiki/representative-hcard-parsing) on {resp.url}") - logging.info(f'Generating WebFinger data for {domain}') + logger.info(f'Generating WebFinger data for {domain}') key = models.MagicKey.get_or_create(domain) props = hcard.get('properties', {}) urls = util.dedupe_urls(props.get('url', []) + [resp.url]) @@ -68,7 +70,7 @@ class User(flask_util.XrdOrJrd): urluser, urldomain = util.parse_acct_uri(url) if urldomain == domain: acct = f'{urluser}@{domain}' - logging.info(f'Found custom username: acct:{acct}') + logger.info(f'Found custom username: acct:{acct}') break # discover atom feed, if any @@ -139,7 +141,7 @@ class User(flask_util.XrdOrJrd): 'href': f'{request.host_url}{domain}/salmon', }] }) - logging.info(f'Returning WebFinger data: {json_dumps(data, indent=2)}') + logger.info(f'Returning WebFinger data: {json_dumps(data, indent=2)}') return data diff --git a/webmention.py b/webmention.py index 4929616..b41a832 100644 --- a/webmention.py +++ b/webmention.py @@ -27,6 +27,8 @@ from app import app import common from models import Follower, MagicKey, Response +logger = logging.getLogger(__name__) + SKIP_EMAIL_DOMAINS = frozenset(('localhost', 'snarfed.org')) @@ -39,7 +41,7 @@ class Webmention(View): target_resp = None # requests.Response def dispatch_request(self): - logging.info(f'Params: {list(request.form.items())}') + 
logger.info(f'Params: {list(request.form.items())}') # fetch source page source = flask_util.get_required_param('source') @@ -48,7 +50,7 @@ class Webmention(View): self.source_domain = urllib.parse.urlparse(self.source_url).netloc.split(':')[0] self.source_mf2 = util.parse_mf2(source_resp) - # logging.debug(f'Parsed mf2 for {source_resp.url} : {json_dumps(self.source_mf2 indent=2)}') + # logger.debug(f'Parsed mf2 for {source_resp.url} : {json_dumps(self.source_mf2 indent=2)}') # check for backlink to bridgy fed (for webmention spec and to confirm # source's intent to federate to mastodon) @@ -61,7 +63,7 @@ class Webmention(View): if not entry: error(f'No microformats2 found on {self.source_url}') - logging.info(f'First entry: {json_dumps(entry, indent=2)}') + logger.info(f'First entry: {json_dumps(entry, indent=2)}') # make sure it has url, since we use that for AS2 id, which is required # for ActivityPub. props = entry.setdefault('properties', {}) @@ -69,7 +71,7 @@ class Webmention(View): props['url'] = [self.source_url] self.source_obj = microformats2.json_to_object(entry, fetch_mf2=True) - logging.info(f'Converted to AS1: {json_dumps(self.source_obj, indent=2)}') + logger.info(f'Converted to AS1: {json_dumps(self.source_obj, indent=2)}') for method in self.try_activitypub, self.try_salmon: ret = method() @@ -112,7 +114,7 @@ class Webmention(View): new_content = content(self.source_mf2) if orig_content and new_content and orig_content == new_content: msg = f'Skipping; new content is same as content published before at {resp.updated}' - logging.info(msg) + logger.info(msg) return msg source_activity['type'] = 'Update' @@ -241,7 +243,7 @@ class Webmention(View): if targets: target = targets[0] if not target: - logging.warning("No targets or followers. Ignoring.") + logger.warning("No targets or followers. 
Ignoring.") return status = None @@ -285,7 +287,7 @@ class Webmention(View): feed = common.requests_get(atom_url).text parsed = feedparser.parse(feed) entry = parsed.entries[0] - logging.info(f'Parsed: {json_dumps(entry, indent=2)}') + logger.info(f'Parsed: {json_dumps(entry, indent=2)}') target_id = entry.id in_reply_to = self.source_obj.get('inReplyTo') source_obj_obj = self.source_obj.get('object') @@ -307,7 +309,7 @@ class Webmention(View): self.source_obj.setdefault('tags', []).append({'url': url}) # extract and discover salmon endpoint - logging.info(f'Discovering Salmon endpoint in {atom_url}') + logger.info(f'Discovering Salmon endpoint in {atom_url}') endpoint = django_salmon.discover_salmon_endpoint(feed) if not endpoint: @@ -328,23 +330,23 @@ class Webmention(View): if not endpoint: error('No salmon endpoint found!') - logging.info(f'Discovered Salmon endpoint {endpoint}') + logger.info(f'Discovered Salmon endpoint {endpoint}') # construct reply Atom object activity = self.source_obj if self.source_obj.get('verb') not in source.VERBS_WITH_OBJECT: activity = {'object': self.source_obj} entry = atom.activity_to_atom(activity, xml_base=self.source_url) - logging.info(f'Converted {self.source_url} to Atom:\n{entry}') + logger.info(f'Converted {self.source_url} to Atom:\n{entry}') # sign reply and wrap in magic envelope domain = urllib.parse.urlparse(self.source_url).netloc key = MagicKey.get_or_create(domain) - logging.info(f'Using key for {domain}: {key}') + logger.info(f'Using key for {domain}: {key}') magic_envelope = magicsigs.magic_envelope( entry, common.CONTENT_TYPE_ATOM, key).decode() - logging.info(f'Sending Salmon slap to {endpoint}') + logger.info(f'Sending Salmon slap to {endpoint}') common.requests_post( endpoint, data=common.XML_UTF8 + magic_envelope, headers={'Content-Type': common.CONTENT_TYPE_MAGIC_ENVELOPE})