From f11af8de168aab368c70e6770315da5897e38967 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel
Date: Sat, 14 Mar 2020 18:09:18 +0100
Subject: [PATCH 001/184] update dev instructions

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index a883ed5..3f0b79c 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ environment variables:
 
 ## Dev
 
 ```
-pip3 install --user matrix-client
+pip3 install --user markdown matrix-nio
 ./matrix_webhook.py
 ```

From 38e814c9565734dc19e04c6b45523d0fca813eae Mon Sep 17 00:00:00 2001
From: Guilhem Saurel
Date: Sat, 14 Mar 2020 18:10:32 +0100
Subject: [PATCH 002/184] format readme

---
 README.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 3f0b79c..3597a12 100644
--- a/README.md
+++ b/README.md
@@ -34,7 +34,8 @@ docker-compose up -d
 
 ## Test / Usage
 
 ```
-curl -d '{"text":"new contrib from toto: [44](http://radio.localhost/map/#44)", "key": "secret"}' 'matrixwebhook.localhost/!DPrUlnwOhBEfYwsDLh:matrix.org'
+curl -d '{"text":"new contrib from toto: [44](http://radio.localhost/map/#44)", "key": "secret"}' \
+  'http://matrixwebhook.localhost/!DPrUlnwOhBEfYwsDLh:matrix.org'
 ```
 (or localhost:4785 without docker)

From ce83079d59f941d9eb3e2afacf9f3b6e8dd8085c Mon Sep 17 00:00:00 2001
From: Guilhem Saurel
Date: Mon, 20 Apr 2020 19:18:53 +0200
Subject: [PATCH 003/184] markdown: extra

https://python-markdown.github.io/extensions/extra/

To get <pre> tags on fenced code
---
 matrix_webhook.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 6f18f75..eb4e06c 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -13,9 +13,8 @@ import os
 from http import HTTPStatus
 from signal import SIGINT, SIGTERM
 
-from markdown import markdown
-
 from aiohttp import web
+from markdown import markdown
 from nio import AsyncClient
 
 SERVER_ADDRESS = ('', int(os.environ.get('PORT', 4785)))
@@ -45,7 +44,7 @@ async def handler(request):
                                        "msgtype": "m.text",
                                        "body": data['text'],
                                        "format": "org.matrix.custom.html",
-                                       "formatted_body": markdown(data['text']),
+                                       "formatted_body": markdown(data['text'], extensions=['extra']),
                                    })
 
     return web.Response(text='{"status": %i, "ret": "%s"}' % (status, ret),
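
For reference, a quick sketch of what the `extra` extension (which bundles `fenced_code`) changes, assuming only the `markdown` package installed above; the sample text is made up:

```
from markdown import markdown

TEXT = "```\nprint('hello')\n```"

# Plain markdown() does not recognise the fence as a code block.
print(markdown(TEXT))
# With 'extra' (which bundles fenced_code), the fence becomes <pre><code>...</code></pre>.
print(markdown(TEXT, extensions=['extra']))
```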

From 9615cdf3c79dc2d2877f5334d9d8f01492413667 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Wed, 13 May 2020 00:29:57 +0200
Subject: [PATCH 004/184] traefik v2

---
 docker-compose.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index b5fe893..26941b0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -14,4 +14,4 @@ services:
       - web
     labels:
       traefik.enable: "true"
-      traefik.frontend.rule: "Host: ${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}, www.${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}"
+      traefik.http.routers.matrix-webhook.rule: "Host(`${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`, `www.${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`)"

From 9883782f3a8486fc91e33485464e405220c8a686 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Sun, 7 Jun 2020 10:31:21 +0200
Subject: [PATCH 005/184] remove the need for www

---
 README.md          | 3 +--
 docker-compose.yml | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 3597a12..ca3fe6d 100644
--- a/README.md
+++ b/README.md
@@ -24,8 +24,7 @@ pip3 install --user markdown matrix-nio
 - Use [Traefik](https://traefik.io/) on the `web` docker network, eg. with
   [proxyta.net](https://framagit.org/oxyta.net/proxyta.net)
 - Put the configuration into a `.env` file
-- Configure your DNS for `${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}` **and**
-  `www.${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`
+- Configure your DNS for `${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`
 
 ```
 docker-compose up -d
diff --git a/docker-compose.yml b/docker-compose.yml
index 26941b0..f19249a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -14,4 +14,4 @@ services:
       - web
     labels:
       traefik.enable: "true"
-      traefik.http.routers.matrix-webhook.rule: "Host(`${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`, `www.${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`)"
+      traefik.http.routers.matrix-webhook.rule: "Host(`${CHATONS_SERVICE:-matrixwebhook}.${CHATONS_DOMAIN:-localhost}`)"

From 887dc95e3d04338fd8ee21071ac745fe8adeb1b0 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Mon, 8 Jun 2020 09:59:51 +0200
Subject: [PATCH 006/184] handle ill-formed JSON

---
 matrix_webhook.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index eb4e06c..0374774 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -32,8 +32,12 @@ async def handler(request):
     This one handles a POST, checks its content, and forwards it to the matrix room.
     """
     data = await request.read()
-    data = json.loads(data.decode())
-    status, ret = HTTPStatus.BAD_REQUEST, 'I need a json dict with text & key'
+    try:
+        data = json.loads(data.decode())
+        status, ret = HTTPStatus.BAD_REQUEST, 'I need a json dict with text & key'
+    except json.decoder.JSONDecodeError:
+        data = {}
+        status, ret = HTTPStatus.BAD_REQUEST, 'This was not valid JSON'
     if all(key in data for key in ['text', 'key']):
         status, ret = HTTPStatus.UNAUTHORIZED, 'I need the good "key"'
         if data['key'] == API_KEY:
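
The guard added here boils down to: decode the body, and turn a `JSONDecodeError` into a clean error status instead of a crash. A standalone sketch of that idea, with an illustrative `parse_payload` helper that is not part of the bot:

```
import json
from http import HTTPStatus

def parse_payload(raw):
    """Return (status, data); BAD_REQUEST and an empty dict on ill-formed JSON."""
    try:
        return HTTPStatus.OK, json.loads(raw.decode())
    except json.decoder.JSONDecodeError:
        return HTTPStatus.BAD_REQUEST, {}

print(parse_payload(b'{"text": "hi", "key": "secret"}'))
print(parse_payload(b'not json'))
```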

From 6c138a65b409cbfae176a32e11df4b2c5f719a87 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Tue, 28 Jul 2020 21:56:05 +0200
Subject: [PATCH 007/184] try another login on connection lost

---
 matrix_webhook.py | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 0374774..5ffb69f 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -16,6 +16,7 @@ from signal import SIGINT, SIGTERM
 from aiohttp import web
 from markdown import markdown
 from nio import AsyncClient
+from nio.exceptions import LocalProtocolError
 
 SERVER_ADDRESS = ('', int(os.environ.get('PORT', 4785)))
 MATRIX_URL = os.environ.get('MATRIX_URL', 'https://matrix.org')
@@ -42,14 +43,21 @@ async def handler(request):
         status, ret = HTTPStatus.UNAUTHORIZED, 'I need the good "key"'
         if data['key'] == API_KEY:
             status, ret = HTTPStatus.OK, 'OK'
-            await CLIENT.room_send(room_id=str(request.rel_url)[1:],
-                                   message_type="m.room.message",
-                                   content={
-                                       "msgtype": "m.text",
-                                       "body": data['text'],
-                                       "format": "org.matrix.custom.html",
-                                       "formatted_body": markdown(data['text'], extensions=['extra']),
-                                   })
+            content = {
+                "msgtype": "m.text",
+                "body": data['text'],
+                "format": "org.matrix.custom.html",
+                "formatted_body": markdown(data['text'], extensions=['extra']),
+            }
+            try:
+                await CLIENT.room_send(room_id=str(request.rel_url)[1:],
+                                       message_type="m.room.message",
+                                       content=content)
+            except LocalProtocolError:  # Connection lost, try another login
+                await CLIENT.login(MATRIX_PW)
+                await CLIENT.room_send(room_id=str(request.rel_url)[1:],
+                                       message_type="m.room.message",
+                                       content=content)
 
     return web.Response(text='{"status": %i, "ret": "%s"}' % (status, ret),
                         content_type='application/json',
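
The retry logic is: send once, and on a `LocalProtocolError` log in again and resend. A minimal asyncio sketch of that pattern, using stand-in exception and client classes rather than the real matrix-nio objects:

```
import asyncio

class ConnectionLost(Exception):
    """Stand-in for nio.exceptions.LocalProtocolError."""

class FakeClient:
    """Stand-in for nio.AsyncClient: refuses to send without a session."""

    def __init__(self):
        self.logged_in = False

    async def login(self, password):
        self.logged_in = True

    async def room_send(self, room_id, content):
        if not self.logged_in:
            raise ConnectionLost('no open session')
        print('sent to %s: %s' % (room_id, content['body']))

async def send_with_relogin(client, room_id, content, password):
    """Try once; on a lost session, log in again and retry once."""
    try:
        await client.room_send(room_id, content)
    except ConnectionLost:
        await client.login(password)
        await client.room_send(room_id, content)

asyncio.run(send_with_relogin(FakeClient(), '!room:example.org', {'body': 'hi'}, 'pw'))
```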

From cf054631e85f405fe8a25a58caaedaa95aadeed5 Mon Sep 17 00:00:00 2001
From: Jochen Kupperschmidt 
Date: Sun, 27 Dec 2020 14:07:04 +0100
Subject: [PATCH 008/184] Extract room ID into variable

---
 matrix_webhook.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 5ffb69f..9e5fb44 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -43,6 +43,7 @@ async def handler(request):
         status, ret = HTTPStatus.UNAUTHORIZED, 'I need the good "key"'
         if data['key'] == API_KEY:
             status, ret = HTTPStatus.OK, 'OK'
+            room_id = str(request.rel_url)[1:]
             content = {
                 "msgtype": "m.text",
                 "body": data['text'],
@@ -50,12 +51,12 @@ async def handler(request):
                 "formatted_body": markdown(data['text'], extensions=['extra']),
             }
             try:
-                await CLIENT.room_send(room_id=str(request.rel_url)[1:],
+                await CLIENT.room_send(room_id=room_id,
                                        message_type="m.room.message",
                                        content=content)
             except LocalProtocolError:  # Connection lost, try another login
                 await CLIENT.login(MATRIX_PW)
-                await CLIENT.room_send(room_id=str(request.rel_url)[1:],
+                await CLIENT.room_send(room_id=room_id,
                                        message_type="m.room.message",
                                        content=content)
 

From 54baf29d513aca87f79e969efc51064d0669f2ad Mon Sep 17 00:00:00 2001
From: Jochen Kupperschmidt 
Date: Sun, 27 Dec 2020 14:11:51 +0100
Subject: [PATCH 009/184] Extract function to send message

Merges duplicated code.
---
 matrix_webhook.py | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 9e5fb44..903c4bb 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -51,20 +51,23 @@ async def handler(request):
                 "formatted_body": markdown(data['text'], extensions=['extra']),
             }
             try:
-                await CLIENT.room_send(room_id=room_id,
-                                       message_type="m.room.message",
-                                       content=content)
+                await send_room_message(room_id, content)
             except LocalProtocolError:  # Connection lost, try another login
                 await CLIENT.login(MATRIX_PW)
-                await CLIENT.room_send(room_id=room_id,
-                                       message_type="m.room.message",
-                                       content=content)
+                await send_room_message(room_id, content)
 
     return web.Response(text='{"status": %i, "ret": "%s"}' % (status, ret),
                         content_type='application/json',
                         status=status)
 
 
+async def send_room_message(room_id, content):
+    """Send a message to a room."""
+    return await CLIENT.room_send(room_id=room_id,
+                                  message_type='m.room.message',
+                                  content=content)
+
+
 async def main(event):
     """
     Launch main coroutine.

From 139ec1670cbeb6515665ab97035bdf9e0f2c5861 Mon Sep 17 00:00:00 2001
From: Jochen Kupperschmidt 
Date: Sun, 27 Dec 2020 14:37:54 +0100
Subject: [PATCH 010/184] Use `aiohttp.web.json_response()`

Avoids explicit setting of JSON content type, handles serialization to
JSON.
---
 matrix_webhook.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 903c4bb..12f47d2 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -56,9 +56,13 @@ async def handler(request):
                 await CLIENT.login(MATRIX_PW)
                 await send_room_message(room_id, content)
 
-    return web.Response(text='{"status": %i, "ret": "%s"}' % (status, ret),
-                        content_type='application/json',
-                        status=status)
+    return create_json_response(status, ret)
+
+
+def create_json_response(status, ret):
+    """Create a JSON response."""
+    response_data = {'status': status, 'ret': ret}
+    return web.json_response(response_data, status=status)
 
 
 async def send_room_message(room_id, content):
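
A minimal, self-contained sketch of `aiohttp.web.json_response()`, which serialises the dict and sets the JSON content type on its own; the tiny app below is illustrative, not the webhook itself:

```
from http import HTTPStatus

from aiohttp import web

async def handler(request):
    # json_response serialises the dict and sets Content-Type: application/json.
    return web.json_response({'status': HTTPStatus.OK, 'ret': 'OK'}, status=HTTPStatus.OK)

app = web.Application()
app.add_routes([web.post('/', handler)])

if __name__ == '__main__':
    web.run_app(app, port=4785)
```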

From 78b9533e2bd0c567cea0f4e150dd4ac884a19e0e Mon Sep 17 00:00:00 2001
From: Jochen Kupperschmidt 
Date: Sun, 27 Dec 2020 14:50:52 +0100
Subject: [PATCH 011/184] Exit early after each request check

Keeps the "happy path" on the function's base indentation level.

Avoids carrying along status and return value variables that are pre-filled
in anticipation of an error.
---
 matrix_webhook.py | 43 +++++++++++++++++++++++--------------------
 1 file changed, 23 insertions(+), 20 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 12f47d2..6a88346 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -33,30 +33,33 @@ async def handler(request):
     This one handles a POST, checks its content, and forwards it to the matrix room.
     """
     data = await request.read()
+
     try:
         data = json.loads(data.decode())
-        status, ret = HTTPStatus.BAD_REQUEST, 'I need a json dict with text & key'
     except json.decoder.JSONDecodeError:
-        data = {}
-        status, ret = HTTPStatus.BAD_REQUEST, 'This was not valid JSON'
-    if all(key in data for key in ['text', 'key']):
-        status, ret = HTTPStatus.UNAUTHORIZED, 'I need the good "key"'
-        if data['key'] == API_KEY:
-            status, ret = HTTPStatus.OK, 'OK'
-            room_id = str(request.rel_url)[1:]
-            content = {
-                "msgtype": "m.text",
-                "body": data['text'],
-                "format": "org.matrix.custom.html",
-                "formatted_body": markdown(data['text'], extensions=['extra']),
-            }
-            try:
-                await send_room_message(room_id, content)
-            except LocalProtocolError:  # Connection lost, try another login
-                await CLIENT.login(MATRIX_PW)
-                await send_room_message(room_id, content)
+        return create_json_response(HTTPStatus.BAD_REQUEST, 'Invalid JSON')
 
-    return create_json_response(status, ret)
+    if not all(key in data for key in ['text', 'key']):
+        return create_json_response(HTTPStatus.BAD_REQUEST,
+                                    'Missing text and/or API key property')
+
+    if data['key'] != API_KEY:
+        return create_json_response(HTTPStatus.UNAUTHORIZED, 'Invalid API key')
+
+    room_id = str(request.rel_url)[1:]
+    content = {
+        "msgtype": "m.text",
+        "body": data['text'],
+        "format": "org.matrix.custom.html",
+        "formatted_body": markdown(data['text'], extensions=['extra']),
+    }
+    try:
+        await send_room_message(room_id, content)
+    except LocalProtocolError:  # Connection lost, try another login
+        await CLIENT.login(MATRIX_PW)
+        await send_room_message(room_id, content)
+
+    return create_json_response(HTTPStatus.OK, 'OK')
 
 
 def create_json_response(status, ret):
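
The guard-clause structure can be shown without aiohttp at all: each check returns immediately, and only the happy path reaches the end of the function. A small illustrative sketch with made-up names:

```
from http import HTTPStatus

API_KEY = 'secret'

def check_request(data):
    """Validate a payload with guard clauses: fail fast, happy path stays unindented."""
    if not isinstance(data, dict):
        return HTTPStatus.BAD_REQUEST, 'Invalid JSON'
    if not all(key in data for key in ['text', 'key']):
        return HTTPStatus.BAD_REQUEST, 'Missing text and/or API key property'
    if data['key'] != API_KEY:
        return HTTPStatus.UNAUTHORIZED, 'Invalid API key'
    return HTTPStatus.OK, 'OK'

print(check_request({'text': 'hi', 'key': 'secret'}))
print(check_request({'text': 'hi'}))
```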

From 00f47f99a923ceb845d2e1606c4ddb45dfdce08f Mon Sep 17 00:00:00 2001
From: Jochen Kupperschmidt 
Date: Sun, 27 Dec 2020 14:57:14 +0100
Subject: [PATCH 012/184] Unify use of single quotes for non-docstring string
 literals

---
 matrix_webhook.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 6a88346..2f8482e 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -48,10 +48,10 @@ async def handler(request):
 
     room_id = str(request.rel_url)[1:]
     content = {
-        "msgtype": "m.text",
-        "body": data['text'],
-        "format": "org.matrix.custom.html",
-        "formatted_body": markdown(data['text'], extensions=['extra']),
+        'msgtype': 'm.text',
+        'body': data['text'],
+        'format': 'org.matrix.custom.html',
+        'formatted_body': markdown(data['text'], extensions=['extra']),
     }
     try:
         await send_room_message(room_id, content)

From 4506632c6fdc0f23aa35ef7e39dbe8456f12794e Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Mon, 12 Apr 2021 21:05:20 +0200
Subject: [PATCH 013/184] python 3.9

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 83edd1c..131bfd6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.8-slim
+FROM python:3.9-slim
 
 EXPOSE 4785
 

From 98cd9362aa87ae2d3e86e71a6450a257e2285c5d Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Sun, 30 May 2021 16:34:09 +0200
Subject: [PATCH 014/184] Add HOST to PORT configuration, and document both

Co-authored-by: Sven Seeberg 
---
 README.md         | 2 ++
 matrix_webhook.py | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index ca3fe6d..7e6229b 100644
--- a/README.md
+++ b/README.md
@@ -11,6 +11,8 @@ environment variables:
 - `MATRIX_ID`: the user id of the bot on this server
 - `MATRIX_PW`: the password for this user
 - `API_KEY`: a secret to share with the users of the service
+- `HOST`: HOST to listen on, all interfaces if `''` (default).
+- `PORT`: PORT to listen on, defaults to 4785.
 
 ## Dev
 
diff --git a/matrix_webhook.py b/matrix_webhook.py
index 2f8482e..b8b02a4 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -18,7 +18,7 @@ from markdown import markdown
 from nio import AsyncClient
 from nio.exceptions import LocalProtocolError
 
-SERVER_ADDRESS = ('', int(os.environ.get('PORT', 4785)))
+SERVER_ADDRESS = (os.environ.get('INTERFACE', None), int(os.environ.get('PORT', 4785)))
 MATRIX_URL = os.environ.get('MATRIX_URL', 'https://matrix.org')
 MATRIX_ID = os.environ.get('MATRIX_ID', '@wwm:matrix.org')
 MATRIX_PW = os.environ['MATRIX_PW']

From 2499832e1c69cb96d272fbbfe03a6bf90536d5a0 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Sun, 30 May 2021 16:40:01 +0200
Subject: [PATCH 015/184] use aiohttp.web.BaseRequest.path instead of rel_url

Co-authored-by: Sven Seeberg 
---
 matrix_webhook.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index b8b02a4..87b3664 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -18,7 +18,7 @@ from markdown import markdown
 from nio import AsyncClient
 from nio.exceptions import LocalProtocolError
 
-SERVER_ADDRESS = (os.environ.get('INTERFACE', None), int(os.environ.get('PORT', 4785)))
+SERVER_ADDRESS = (os.environ.get('INTERFACE', ''), int(os.environ.get('PORT', 4785)))
 MATRIX_URL = os.environ.get('MATRIX_URL', 'https://matrix.org')
 MATRIX_ID = os.environ.get('MATRIX_ID', '@wwm:matrix.org')
 MATRIX_PW = os.environ['MATRIX_PW']
@@ -46,7 +46,7 @@ async def handler(request):
     if data['key'] != API_KEY:
         return create_json_response(HTTPStatus.UNAUTHORIZED, 'Invalid API key')
 
-    room_id = str(request.rel_url)[1:]
+    room_id = request.path[1:]
     content = {
         'msgtype': 'm.text',
         'body': data['text'],
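
A quick way to see the difference between `rel_url` and `path`, using aiohttp's bundled test helper; the room id below is made up:

```
from aiohttp.test_utils import make_mocked_request

request = make_mocked_request('POST', '/!room:example.org?foo=bar')

print(request.rel_url)   # relative URL, query string included
print(request.path)      # path only, as a plain string
print(request.path[1:])  # what the handler uses as the room id
```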

From fb17a87016e3c0994e99f41e7dd15df607c9a778 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Sun, 30 May 2021 22:14:42 +0200
Subject: [PATCH 016/184] typo

---
 matrix_webhook.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/matrix_webhook.py b/matrix_webhook.py
index 87b3664..50e1afc 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -18,7 +18,7 @@ from markdown import markdown
 from nio import AsyncClient
 from nio.exceptions import LocalProtocolError
 
-SERVER_ADDRESS = (os.environ.get('INTERFACE', ''), int(os.environ.get('PORT', 4785)))
+SERVER_ADDRESS = (os.environ.get('HOST', ''), int(os.environ.get('PORT', 4785)))
 MATRIX_URL = os.environ.get('MATRIX_URL', 'https://matrix.org')
 MATRIX_ID = os.environ.get('MATRIX_ID', '@wwm:matrix.org')
 MATRIX_PW = os.environ['MATRIX_PW']
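
With this fix the listen address comes from `HOST` and `PORT`; a short sketch of how the two environment variables become the address tuple:

```
import os

# HOST defaults to '' (all interfaces), PORT to 4785.
SERVER_ADDRESS = (os.environ.get('HOST', ''), int(os.environ.get('PORT', 4785)))
print(SERVER_ADDRESS)

os.environ['HOST'], os.environ['PORT'] = '127.0.0.1', '8080'
print((os.environ.get('HOST', ''), int(os.environ.get('PORT', 4785))))
```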

From abe6497421702e85c64955b701edd58c7855b2d8 Mon Sep 17 00:00:00 2001
From: Guilhem Saurel 
Date: Thu, 17 Jun 2021 01:23:25 +0200
Subject: [PATCH 017/184] setup tests

---
 .github/workflows/test.yml |   12 +
 matrix_webhook.py          |    9 +-
 setup.cfg                  |    2 +
 test.yml                   |   11 +
 tests/.env                 |    4 +
 tests/Dockerfile           |   17 +
 tests/__init__.py          |    1 +
 tests/homeserver.yaml      | 2910 ++++++++++++++++++++++++++++++++++++
 tests/tests.py             |   51 +
 tests/utils.py             |   49 +
 10 files changed, 3060 insertions(+), 6 deletions(-)
 create mode 100644 .github/workflows/test.yml
 create mode 100644 setup.cfg
 create mode 100644 test.yml
 create mode 100644 tests/.env
 create mode 100644 tests/Dockerfile
 create mode 100644 tests/__init__.py
 create mode 100644 tests/homeserver.yaml
 create mode 100644 tests/tests.py
 create mode 100644 tests/utils.py

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..2819793
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,12 @@
+name: build docker image and run tests inside
+on: push
+jobs:
+  tests:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Start
+        run: docker-compose -f test.yml up -d
+      - name: Tests
+        run: docker-compose -f test.yml run --entrypoint "" tests python -m unittest
diff --git a/matrix_webhook.py b/matrix_webhook.py
index 50e1afc..e9f2efa 100755
--- a/matrix_webhook.py
+++ b/matrix_webhook.py
@@ -40,8 +40,7 @@ async def handler(request):
         return create_json_response(HTTPStatus.BAD_REQUEST, 'Invalid JSON')
 
     if not all(key in data for key in ['text', 'key']):
-        return create_json_response(HTTPStatus.BAD_REQUEST,
-                                    'Missing text and/or API key property')
+        return create_json_response(HTTPStatus.BAD_REQUEST, 'Missing text and/or API key property')
 
     if data['key'] != API_KEY:
         return create_json_response(HTTPStatus.UNAUTHORIZED, 'Invalid API key')
@@ -51,7 +50,7 @@ async def handler(request):
         'msgtype': 'm.text',
         'body': data['text'],
         'format': 'org.matrix.custom.html',
-        'formatted_body': markdown(data['text'], extensions=['extra']),
+        'formatted_body': markdown(str(data['text']), extensions=['extra']),
     }
     try:
         await send_room_message(room_id, content)
@@ -70,9 +69,7 @@ def create_json_response(status, ret):
 
 async def send_room_message(room_id, content):
     """Send a message to a room."""
-    return await CLIENT.room_send(room_id=room_id,
-                                  message_type='m.room.message',
-                                  content=content)
+    return await CLIENT.room_send(room_id=room_id, message_type='m.room.message', content=content)
 
 
 async def main(event):
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..4a3b956
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[pydocstyle]
+ignore = D203,D204,D212
diff --git a/test.yml b/test.yml
new file mode 100644
index 0000000..891b72b
--- /dev/null
+++ b/test.yml
@@ -0,0 +1,11 @@
+version: '3'
+
+services:
+  bot:
+    build: .
+    env_file:
+      - tests/.env
+  tests:
+    build: tests
+    env_file:
+      - tests/.env
diff --git a/tests/.env b/tests/.env
new file mode 100644
index 0000000..b2047d8
--- /dev/null
+++ b/tests/.env
@@ -0,0 +1,4 @@
+MATRIX_URL=http://tests
+MATRIX_ID=bot
+MATRIX_PW=pw
+API_KEY=ak
diff --git a/tests/Dockerfile b/tests/Dockerfile
new file mode 100644
index 0000000..2cd55d3
--- /dev/null
+++ b/tests/Dockerfile
@@ -0,0 +1,17 @@
+FROM matrixdotorg/synapse
+
+# This defaults to /data which is a volume aiming at keeping data.
+# Here, we want to trash those, and avoid the permission issues, so let's use something else
+ENV SYNAPSE_CONFIG_DIR=/srv
+
+WORKDIR $SYNAPSE_CONFIG_DIR
+
+ADD homeserver.yaml .
+
+RUN python -m synapse.app.homeserver --config-path homeserver.yaml --generate-keys
+
+RUN chown -R 991:991 .
+
+RUN python -m pip install aiohttp matrix-nio
+
+ADD . .
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..6994d90
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+"""Make this directory a valid module for unittests autodiscover to work."""
diff --git a/tests/homeserver.yaml b/tests/homeserver.yaml
new file mode 100644
index 0000000..4d3e994
--- /dev/null
+++ b/tests/homeserver.yaml
@@ -0,0 +1,2910 @@
+# Configuration file for Synapse.
+#
+# This is a YAML file: see [1] for a quick introduction. Note in particular
+# that *indentation is important*: all the elements of a list or dictionary
+# should have the same indentation.
+#
+# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
+
+## Server ##
+
+# The public-facing domain of the server
+#
+# The server_name name will appear at the end of usernames and room addresses
+# created on this server. For example if the server_name was example.com,
+# usernames on this server would be in the format @user:example.com
+#
+# In most cases you should avoid using a matrix specific subdomain such as
+# matrix.example.com or synapse.example.com as the server_name for the same
+# reasons you wouldn't use user@email.example.com as your email address.
+# See https://github.com/matrix-org/synapse/blob/master/docs/delegate.md
+# for information on how to host Synapse on a subdomain while preserving
+# a clean server_name.
+#
+# The server_name cannot be changed later so it is important to
+# configure this correctly before you start Synapse. It should be all
+# lowercase and may contain an explicit port.
+# Examples: matrix.org, localhost:8080
+#
+server_name: "tests"
+
+# When running as a daemon, the file to store the pid in
+#
+pid_file: /srv/homeserver.pid
+
+# The absolute URL to the web client which /_matrix/client will redirect
+# to if 'webclient' is configured under the 'listeners' configuration.
+#
+# This option can be also set to the filesystem path to the web client
+# which will be served at /_matrix/client/ if 'webclient' is configured
+# under the 'listeners' configuration, however this is a security risk:
+# https://github.com/matrix-org/synapse#security-note
+#
+#web_client_location: https://riot.example.com/
+
+# The public-facing base URL that clients use to access this Homeserver (not
+# including _matrix/...). This is the same URL a user might enter into the
+# 'Custom Homeserver URL' field on their client. If you use Synapse with a
+# reverse proxy, this should be the URL to reach Synapse via the proxy.
+# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
+# 'listeners' below).
+#
+#public_baseurl: https://example.com/
+
+# Set the soft limit on the number of file descriptors synapse can use
+# Zero is used to indicate synapse should set the soft limit to the
+# hard limit.
+#
+#soft_file_limit: 0
+
+# Presence tracking allows users to see the state (e.g online/offline)
+# of other local and remote users.
+#
+presence:
+  # Uncomment to disable presence tracking on this homeserver. This option
+  # replaces the previous top-level 'use_presence' option.
+  #
+  #enabled: false
+
+  # Presence routers are third-party modules that can specify additional logic
+  # to where presence updates from users are routed.
+  #
+  presence_router:
+    # The custom module's class. Uncomment to use a custom presence router module.
+    #
+    #module: "my_custom_router.PresenceRouter"
+
+    # Configuration options of the custom module. Refer to your module's
+    # documentation for available options.
+    #
+    #config:
+    #  example_option: 'something'
+
+# Whether to require authentication to retrieve profile data (avatars,
+# display names) of other users through the client API. Defaults to
+# 'false'. Note that profile data is also available via the federation
+# API, unless allow_profile_lookup_over_federation is set to false.
+#
+#require_auth_for_profile_requests: true
+
+# Uncomment to require a user to share a room with another user in order
+# to retrieve their profile information. Only checked on Client-Server
+# requests. Profile requests from other servers should be checked by the
+# requesting server. Defaults to 'false'.
+#
+#limit_profile_requests_to_users_who_share_rooms: true
+
+# Uncomment to prevent a user's profile data from being retrieved and
+# displayed in a room until they have joined it. By default, a user's
+# profile data is included in an invite event, regardless of the values
+# of the above two settings, and whether or not the users share a server.
+# Defaults to 'true'.
+#
+#include_profile_data_on_invite: false
+
+# If set to 'true', removes the need for authentication to access the server's
+# public rooms directory through the client API, meaning that anyone can
+# query the room directory. Defaults to 'false'.
+#
+#allow_public_rooms_without_auth: true
+
+# If set to 'true', allows any other homeserver to fetch the server's public
+# rooms directory via federation. Defaults to 'false'.
+#
+#allow_public_rooms_over_federation: true
+
+# The default room version for newly created rooms.
+#
+# Known room versions are listed here:
+# https://matrix.org/docs/spec/#complete-list-of-room-versions
+#
+# For example, for room version 1, default_room_version should be set
+# to "1".
+#
+#default_room_version: "6"
+
+# The GC threshold parameters to pass to `gc.set_threshold`, if defined
+#
+#gc_thresholds: [700, 10, 10]
+
+# The minimum time in seconds between each GC for a generation, regardless of
+# the GC thresholds. This ensures that we don't do GC too frequently.
+#
+# A value of `[1s, 10s, 30s]` indicates that a second must pass between consecutive
+# generation 0 GCs, etc.
+#
+# Defaults to `[1s, 10s, 30s]`.
+#
+#gc_min_interval: [0.5s, 30s, 1m]
+
+# Set the limit on the returned events in the timeline in the get
+# and sync operations. The default value is 100. -1 means no upper limit.
+#
+# Uncomment the following to increase the limit to 5000.
+#
+#filter_timeline_limit: 5000
+
+# Whether room invites to users on this server should be blocked
+# (except those sent by local server admins). The default is False.
+#
+#block_non_admin_invites: true
+
+# Room searching
+#
+# If disabled, new messages will not be indexed for searching and users
+# will receive errors when searching for messages. Defaults to enabled.
+#
+#enable_search: false
+
+# Prevent outgoing requests from being sent to the following blacklisted IP address
+# CIDR ranges. If this option is not specified then it defaults to private IP
+# address ranges (see the example below).
+#
+# The blacklist applies to the outbound requests for federation, identity servers,
+# push servers, and for checking key validity for third-party invite events.
+#
+# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
+# listed here, since they correspond to unroutable addresses.)
+#
+# This option replaces federation_ip_range_blacklist in Synapse v1.25.0.
+#
+#ip_range_blacklist:
+#  - '127.0.0.0/8'
+#  - '10.0.0.0/8'
+#  - '172.16.0.0/12'
+#  - '192.168.0.0/16'
+#  - '100.64.0.0/10'
+#  - '192.0.0.0/24'
+#  - '169.254.0.0/16'
+#  - '192.88.99.0/24'
+#  - '198.18.0.0/15'
+#  - '192.0.2.0/24'
+#  - '198.51.100.0/24'
+#  - '203.0.113.0/24'
+#  - '224.0.0.0/4'
+#  - '::1/128'
+#  - 'fe80::/10'
+#  - 'fc00::/7'
+#  - '2001:db8::/32'
+#  - 'ff00::/8'
+#  - 'fec0::/10'
+
+# List of IP address CIDR ranges that should be allowed for federation,
+# identity servers, push servers, and for checking key validity for
+# third-party invite events. This is useful for specifying exceptions to
+# wide-ranging blacklisted target IP ranges - e.g. for communication with
+# a push server only visible in your network.
+#
+# This whitelist overrides ip_range_blacklist and defaults to an empty
+# list.
+#
+#ip_range_whitelist:
+#   - '192.168.1.1'
+
+# List of ports that Synapse should listen on, their purpose and their
+# configuration.
+#
+# Options for each listener include:
+#
+#   port: the TCP port to bind to
+#
+#   bind_addresses: a list of local addresses to listen on. The default is
+#       'all local interfaces'.
+#
+#   type: the type of listener. Normally 'http', but other valid options are:
+#       'manhole' (see docs/manhole.md),
+#       'metrics' (see docs/metrics-howto.md),
+#       'replication' (see docs/workers.md).
+#
+#   tls: set to true to enable TLS for this listener. Will use the TLS
+#       key/cert specified in tls_private_key_path / tls_certificate_path.
+#
+#   x_forwarded: Only valid for an 'http' listener. Set to true to use the
+#       X-Forwarded-For header as the client IP. Useful when Synapse is
+#       behind a reverse-proxy.
+#
+#   resources: Only valid for an 'http' listener. A list of resources to host
+#       on this port. Options for each resource are:
+#
+#       names: a list of names of HTTP resources. See below for a list of
+#           valid resource names.
+#
+#       compress: set to true to enable HTTP compression for this resource.
+#
+#   additional_resources: Only valid for an 'http' listener. A map of
+#        additional endpoints which should be loaded via dynamic modules.
+#
+# Valid resource names are:
+#
+#   client: the client-server API (/_matrix/client), and the synapse admin
+#       API (/_synapse/admin). Also implies 'media' and 'static'.
+#
+#   consent: user consent forms (/_matrix/consent). See
+#       docs/consent_tracking.md.
+#
+#   federation: the server-server API (/_matrix/federation). Also implies
+#       'media', 'keys', 'openid'
+#
+#   keys: the key discovery API (/_matrix/keys).
+#
+#   media: the media API (/_matrix/media).
+#
+#   metrics: the metrics interface. See docs/metrics-howto.md.
+#
+#   openid: OpenID authentication.
+#
+#   replication: the HTTP replication API (/_synapse/replication). See
+#       docs/workers.md.
+#
+#   static: static resources under synapse/static (/_matrix/static). (Mostly
+#       useful for 'fallback authentication'.)
+#
+#   webclient: A web client. Requires web_client_location to be set.
+#
+listeners:
+  # TLS-enabled listener: for when matrix traffic is sent directly to synapse.
+  #
+  # Disabled by default. To enable it, uncomment the following. (Note that you
+  # will also need to give Synapse a TLS key and certificate: see the TLS section
+  # below.)
+  #
+  #- port: 8448
+  #  type: http
+  #  tls: true
+  #  resources:
+  #    - names: [client, federation]
+
+  # Unsecure HTTP listener: for when matrix traffic passes through a reverse proxy
+  # that unwraps TLS.
+  #
+  # If you plan to use a reverse proxy, please see
+  # https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
+  #
+  - port: 80
+    tls: false
+    type: http
+    x_forwarded: true
+
+    resources:
+      - names: [client, federation]
+        compress: false
+
+    # example additional_resources:
+    #
+    #additional_resources:
+    #  "/_matrix/my/custom/endpoint":
+    #    module: my_module.CustomRequestHandler
+    #    config: {}
+
+  # Turn on the twisted ssh manhole service on localhost on the given
+  # port.
+  #
+  #- port: 9000
+  #  bind_addresses: ['::1', '127.0.0.1']
+  #  type: manhole
+
+# Forward extremities can build up in a room due to networking delays between
+# homeservers. Once this happens in a large room, calculation of the state of
+# that room can become quite expensive. To mitigate this, once the number of
+# forward extremities reaches a given threshold, Synapse will send an
+# org.matrix.dummy_event event, which will reduce the forward extremities
+# in the room.
+#
+# This setting defines the threshold (i.e. number of forward extremities in the
+# room) at which dummy events are sent. The default value is 10.
+#
+#dummy_events_threshold: 5
+
+
+## Homeserver blocking ##
+
+# How to reach the server admin, used in ResourceLimitError
+#
+#admin_contact: 'mailto:admin@server.com'
+
+# Global blocking
+#
+#hs_disabled: false
+#hs_disabled_message: 'Human readable reason for why the HS is blocked'
+
+# Monthly Active User Blocking
+#
+# Used in cases where the admin or server owner wants to limit to the
+# number of monthly active users.
+#
+# 'limit_usage_by_mau' disables/enables monthly active user blocking. When
+# enabled and a limit is reached the server returns a 'ResourceLimitError'
+# with error type Codes.RESOURCE_LIMIT_EXCEEDED
+#
+# 'max_mau_value' is the hard limit of monthly active users above which
+# the server will start blocking user actions.
+#
+# 'mau_trial_days' is a means to add a grace period for active users. It
+# means that users must be active for this number of days before they
+# can be considered active and guards against the case where lots of users
+# sign up in a short space of time never to return after their initial
+# session.
+#
+# 'mau_limit_alerting' is a means of limiting client side alerting
+# should the mau limit be reached. This is useful for small instances
+# where the admin has 5 mau seats (say) for 5 specific people and no
+# interest increasing the mau limit further. Defaults to True, which
+# means that alerting is enabled
+#
+#limit_usage_by_mau: false
+#max_mau_value: 50
+#mau_trial_days: 2
+#mau_limit_alerting: false
+
+# If enabled, the metrics for the number of monthly active users will
+# be populated, however no one will be limited. If limit_usage_by_mau
+# is true, this is implied to be true.
+#
+#mau_stats_only: false
+
+# Sometimes the server admin will want to ensure certain accounts are
+# never blocked by mau checking. These accounts are specified here.
+#
+#mau_limit_reserved_threepids:
+#  - medium: 'email'
+#    address: 'reserved_user@example.com'
+
+# Used by phonehome stats to group together related servers.
+#server_context: context
+
+# Resource-constrained homeserver settings
+#
+# When this is enabled, the room "complexity" will be checked before a user
+# joins a new remote room. If it is above the complexity limit, the server will
+# disallow joining, or will instantly leave.
+#
+# Room complexity is an arbitrary measure based on factors such as the number of
+# users in the room.
+#
+limit_remote_rooms:
+  # Uncomment to enable room complexity checking.
+  #
+  #enabled: true
+
+  # the limit above which rooms cannot be joined. The default is 1.0.
+  #
+  #complexity: 0.5
+
+  # override the error which is returned when the room is too complex.
+  #
+  #complexity_error: "This room is too complex."
+
+  # allow server admins to join complex rooms. Default is false.
+  #
+  #admins_can_join: true
+
+# Whether to require a user to be in the room to add an alias to it.
+# Defaults to 'true'.
+#
+#require_membership_for_aliases: false
+
+# Whether to allow per-room membership profiles through the send of membership
+# events with profile information that differ from the target's global profile.
+# Defaults to 'true'.
+#
+#allow_per_room_profiles: false
+
+# How long to keep redacted events in unredacted form in the database. After
+# this period redacted events get replaced with their redacted form in the DB.
+#
+# Defaults to `7d`. Set to `null` to disable.
+#
+#redaction_retention_period: 28d
+
+# How long to track users' last seen time and IPs in the database.
+#
+# Defaults to `28d`. Set to `null` to disable clearing out of old rows.
+#
+#user_ips_max_age: 14d
+
+# Message retention policy at the server level.
+#
+# Room admins and mods can define a retention period for their rooms using the
+# 'm.room.retention' state event, and server admins can cap this period by setting
+# the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options.
+#
+# If this feature is enabled, Synapse will regularly look for and purge events
+# which are older than the room's maximum retention period. Synapse will also
+# filter events received over federation so that events that should have been
+# purged are ignored and not stored again.
+#
+retention:
+  # The message retention policies feature is disabled by default. Uncomment the
+  # following line to enable it.
+  #
+  #enabled: true
+
+  # Default retention policy. If set, Synapse will apply it to rooms that lack the
+  # 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't
+  # matter much because Synapse doesn't take it into account yet.
+  #
+  #default_policy:
+  #  min_lifetime: 1d
+  #  max_lifetime: 1y
+
+  # Retention policy limits. If set, and the state of a room contains a
+  # 'm.room.retention' event in its state which contains a 'min_lifetime' or a
+  # 'max_lifetime' that's out of these bounds, Synapse will cap the room's policy
+  # to these limits when running purge jobs.
+  #
+  #allowed_lifetime_min: 1d
+  #allowed_lifetime_max: 1y
+
+  # Server admins can define the settings of the background jobs purging the
+  # events which lifetime has expired under the 'purge_jobs' section.
+  #
+  # If no configuration is provided, a single job will be set up to delete expired
+  # events in every room daily.
+  #
+  # Each job's configuration defines which range of message lifetimes the job
+  # takes care of. For example, if 'shortest_max_lifetime' is '2d' and
+  # 'longest_max_lifetime' is '3d', the job will handle purging expired events in
+  # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and
+  # lower than or equal to 3 days. Both the minimum and the maximum value of a
+  # range are optional, e.g. a job with no 'shortest_max_lifetime' and a
+  # 'longest_max_lifetime' of '3d' will handle every room with a retention policy
+  # which 'max_lifetime' is lower than or equal to three days.
+  #
+  # The rationale for this per-job configuration is that some rooms might have a
+  # retention policy with a low 'max_lifetime', where history needs to be purged
+  # of outdated messages on a more frequent basis than for the rest of the rooms
+  # (e.g. every 12h), but not want that purge to be performed by a job that's
+  # iterating over every room it knows, which could be heavy on the server.
+  #
+  # If any purge job is configured, it is strongly recommended to have at least
+  # a single job with neither 'shortest_max_lifetime' nor 'longest_max_lifetime'
+  # set, or one job without 'shortest_max_lifetime' and one job without
+  # 'longest_max_lifetime' set. Otherwise some rooms might be ignored, even if
+  # 'allowed_lifetime_min' and 'allowed_lifetime_max' are set, because capping a
+  # room's policy to these values is done after the policies are retrieved from
+  # Synapse's database (which is done using the range specified in a purge job's
+  # configuration).
+  #
+  #purge_jobs:
+  #  - longest_max_lifetime: 3d
+  #    interval: 12h
+  #  - shortest_max_lifetime: 3d
+  #    interval: 1d
+
+# Inhibits the /requestToken endpoints from returning an error that might leak
+# information about whether an e-mail address is in use or not on this
+# homeserver.
+# Note that for some endpoints the error situation is the e-mail already being
+# used, and for others the error is entering the e-mail being unused.
+# If this option is enabled, instead of returning an error, these endpoints will
+# act as if no error happened and return a fake session ID ('sid') to clients.
+#
+#request_token_inhibit_3pid_errors: true
+
+# A list of domains that the domain portion of 'next_link' parameters
+# must match.
+#
+# This parameter is optionally provided by clients while requesting
+# validation of an email or phone number, and maps to a link that
+# users will be automatically redirected to after validation
+# succeeds. Clients can make use this parameter to aid the validation
+# process.
+#
+# The whitelist is applied whether the homeserver or an
+# identity server is handling validation.
+#
+# The default value is no whitelist functionality; all domains are
+# allowed. Setting this value to an empty list will instead disallow
+# all domains.
+#
+#next_link_domain_whitelist: ["matrix.org"]
+
+
+## TLS ##
+
+# PEM-encoded X509 certificate for TLS.
+# This certificate, as of Synapse 1.0, will need to be a valid and verifiable
+# certificate, signed by a recognised Certificate Authority.
+#
+# See 'ACME support' below to enable auto-provisioning this certificate via
+# Let's Encrypt.
+#
+# If supplying your own, be sure to use a `.pem` file that includes the
+# full certificate chain including any intermediate certificates (for
+# instance, if using certbot, use `fullchain.pem` as your certificate,
+# not `cert.pem`).
+#
+#tls_certificate_path: "/data/synapse.tls.crt"
+
+# PEM-encoded private key for TLS
+#
+#tls_private_key_path: "/data/synapse.tls.key"
+
+# Whether to verify TLS server certificates for outbound federation requests.
+#
+# Defaults to `true`. To disable certificate verification, uncomment the
+# following line.
+#
+#federation_verify_certificates: false
+
+# The minimum TLS version that will be used for outbound federation requests.
+#
+# Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note
+# that setting this value higher than `1.2` will prevent federation to most
+# of the public Matrix network: only configure it to `1.3` if you have an
+# entirely private federation setup and you can ensure TLS 1.3 support.
+#
+#federation_client_minimum_tls_version: 1.2
+
+# Skip federation certificate verification on the following whitelist
+# of domains.
+#
+# This setting should only be used in very specific cases, such as
+# federation over Tor hidden services and similar. For private networks
+# of homeservers, you likely want to use a private CA instead.
+#
+# Only effective if federation_verify_certicates is `true`.
+#
+#federation_certificate_verification_whitelist:
+#  - lon.example.com
+#  - *.domain.com
+#  - *.onion
+
+# List of custom certificate authorities for federation traffic.
+#
+# This setting should only normally be used within a private network of
+# homeservers.
+#
+# Note that this list will replace those that are provided by your
+# operating environment. Certificates must be in PEM format.
+#
+#federation_custom_ca_list:
+#  - myCA1.pem
+#  - myCA2.pem
+#  - myCA3.pem
+
+# ACME support: This will configure Synapse to request a valid TLS certificate
+# for your configured `server_name` via Let's Encrypt.
+#
+# Note that ACME v1 is now deprecated, and Synapse currently doesn't support
+# ACME v2. This means that this feature currently won't work with installs set
+# up after November 2019. For more info, and alternative solutions, see
+# https://github.com/matrix-org/synapse/blob/master/docs/ACME.md#deprecation-of-acme-v1
+#
+# Note that provisioning a certificate in this way requires port 80 to be
+# routed to Synapse so that it can complete the http-01 ACME challenge.
+# By default, if you enable ACME support, Synapse will attempt to listen on
+# port 80 for incoming http-01 challenges - however, this will likely fail
+# with 'Permission denied' or a similar error.
+#
+# There are a couple of potential solutions to this:
+#
+#  * If you already have an Apache, Nginx, or similar listening on port 80,
+#    you can configure Synapse to use an alternate port, and have your web
+#    server forward the requests. For example, assuming you set 'port: 8009'
+#    below, on Apache, you would write:
+#
+#    ProxyPass /.well-known/acme-challenge http://localhost:8009/.well-known/acme-challenge
+#
+#  * Alternatively, you can use something like `authbind` to give Synapse
+#    permission to listen on port 80.
+#
+acme:
+    # ACME support is disabled by default. Set this to `true` and uncomment
+    # tls_certificate_path and tls_private_key_path above to enable it.
+    #
+    enabled: false
+
+    # Endpoint to use to request certificates. If you only want to test,
+    # use Let's Encrypt's staging url:
+    #     https://acme-staging.api.letsencrypt.org/directory
+    #
+    #url: https://acme-v01.api.letsencrypt.org/directory
+
+    # Port number to listen on for the HTTP-01 challenge. Change this if
+    # you are forwarding connections through Apache/Nginx/etc.
+    #
+    port: 80
+
+    # Local addresses to listen on for incoming connections.
+    # Again, you may want to change this if you are forwarding connections
+    # through Apache/Nginx/etc.
+    #
+    bind_addresses: ['::', '0.0.0.0']
+
+    # How many days remaining on a certificate before it is renewed.
+    #
+    reprovision_threshold: 30
+
+    # The domain that the certificate should be for. Normally this
+    # should be the same as your Matrix domain (i.e., 'server_name'), but,
+    # by putting a file at 'https:///.well-known/matrix/server',
+    # you can delegate incoming traffic to another server. If you do that,
+    # you should give the target of the delegation here.
+    #
+    # For example: if your 'server_name' is 'example.com', but
+    # 'https://example.com/.well-known/matrix/server' delegates to
+    # 'matrix.example.com', you should put 'matrix.example.com' here.
+    #
+    # If not set, defaults to your 'server_name'.
+    #
+    domain: matrix.example.com
+
+    # file to use for the account key. This will be generated if it doesn't
+    # exist.
+    #
+    # If unspecified, we will use CONFDIR/client.key.
+    #
+    account_key_file: /srv/acme_account.key
+
+
+## Federation ##
+
+# Restrict federation to the following whitelist of domains.
+# N.B. we recommend also firewalling your federation listener to limit
+# inbound federation traffic as early as possible, rather than relying
+# purely on this application-layer restriction.  If not specified, the
+# default is to whitelist everything.
+#
+#federation_domain_whitelist:
+#  - lon.example.com
+#  - nyc.example.com
+#  - syd.example.com
+
+# Report prometheus metrics on the age of PDUs being sent to and received from
+# the following domains. This can be used to give an idea of "delay" on inbound
+# and outbound federation, though be aware that any delay can be due to problems
+# at either end or with the intermediate network.
+#
+# By default, no domains are monitored in this way.
+#
+#federation_metrics_domains:
+#  - matrix.org
+#  - example.com
+
+# Uncomment to disable profile lookup over federation. By default, the
+# Federation API allows other homeservers to obtain profile data of any user
+# on this homeserver. Defaults to 'true'.
+#
+#allow_profile_lookup_over_federation: false
+
+# Uncomment to disable device display name lookup over federation. By default, the
+# Federation API allows other homeservers to obtain device display names of any user
+# on this homeserver. Defaults to 'true'.
+#
+#allow_device_name_lookup_over_federation: false
+
+
+## Caching ##
+
+# Caching can be configured through the following options.
+#
+# A cache 'factor' is a multiplier that can be applied to each of
+# Synapse's caches in order to increase or decrease the maximum
+# number of entries that can be stored.
+
+# The number of events to cache in memory. Not affected by
+# caches.global_factor.
+#
+#event_cache_size: 10K
+
+caches:
+   # Controls the global cache factor, which is the default cache factor
+   # for all caches if a specific factor for that cache is not otherwise
+   # set.
+   #
+   # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment
+   # variable. Setting by environment variable takes priority over
+   # setting through the config file.
+   #
+   # Defaults to 0.5, which will half the size of all caches.
+   #
+   #global_factor: 1.0
+
+   # A dictionary of cache name to cache factor for that individual
+   # cache. Overrides the global cache factor for a given cache.
+   #
+   # These can also be set through environment variables comprised
+   # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital
+   # letters and underscores. Setting by environment variable
+   # takes priority over setting through the config file.
+   # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0
+   #
+   # Some caches have '*' and other characters that are not
+   # alphanumeric or underscores. These caches can be named with or
+   # without the special characters stripped. For example, to specify
+   # the cache factor for `*stateGroupCache*` via an environment
+   # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`.
+   #
+   per_cache_factors:
+     #get_users_who_share_room_with_user: 2.0
+
+
+## Database ##
+
+# The 'database' setting defines the database that synapse uses to store all of
+# its data.
+#
+# 'name' gives the database engine to use: either 'sqlite3' (for SQLite) or
+# 'psycopg2' (for PostgreSQL).
+#
+# 'args' gives options which are passed through to the database engine,
+# except for options starting 'cp_', which are used to configure the Twisted
+# connection pool. For a reference to valid arguments, see:
+#   * for sqlite: https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
+#   * for postgres: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS
+#   * for the connection pool: https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__
+#
+#
+# Example SQLite configuration:
+#
+#database:
+#  name: sqlite3
+#  args:
+#    database: /path/to/homeserver.db
+#
+#
+# Example Postgres configuration:
+#
+#database:
+#  name: psycopg2
+#  args:
+#    user: synapse_user
+#    password: secretpassword
+#    database: synapse
+#    host: localhost
+#    port: 5432
+#    cp_min: 5
+#    cp_max: 10
+#
+# For more information on using Synapse with Postgres, see `docs/postgres.md`.
+#
+database:
+  name: sqlite3
+  args:
+    database: /srv/homeserver.db
+
+
+## Logging ##
+
+# A yaml python logging config file as described by
+# https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
+#
+log_config: "/srv/synapse.log.config"
+
+
+## Ratelimiting ##
+
+# Ratelimiting settings for client actions (registration, login, messaging).
+#
+# Each ratelimiting configuration is made of two parameters:
+#   - per_second: number of requests a client can send per second.
+#   - burst_count: number of requests a client can send before being throttled.
+#
+# Synapse currently uses the following configurations:
+#   - one for messages that ratelimits sending based on the account the client
+#     is using
+#   - one for registration that ratelimits registration requests based on the
+#     client's IP address.
+#   - one for login that ratelimits login requests based on the client's IP
+#     address.
+#   - one for login that ratelimits login requests based on the account the
+#     client is attempting to log into.
+#   - one for login that ratelimits login requests based on the account the
+#     client is attempting to log into, based on the amount of failed login
+#     attempts for this account.
+#   - one for ratelimiting redactions by room admins. If this is not explicitly
+#     set then it uses the same ratelimiting as per rc_message. This is useful
+#     to allow room admins to deal with abuse quickly.
+#   - two for ratelimiting number of rooms a user can join, "local" for when
+#     users are joining rooms the server is already in (this is cheap) vs
+#     "remote" for when users are trying to join rooms not on the server (which
+#     can be more expensive)
+#   - one for ratelimiting how often a user or IP can attempt to validate a 3PID.
+#   - two for ratelimiting how often invites can be sent in a room or to a
+#     specific user.
+#
+# The defaults are as shown below.
+#
+#rc_message:
+#  per_second: 0.2
+#  burst_count: 10
+#
+#rc_registration:
+#  per_second: 0.17
+#  burst_count: 3
+#
+#rc_login:
+#  address:
+#    per_second: 0.17
+#    burst_count: 3
+#  account:
+#    per_second: 0.17
+#    burst_count: 3
+#  failed_attempts:
+#    per_second: 0.17
+#    burst_count: 3
+#
+#rc_admin_redaction:
+#  per_second: 1
+#  burst_count: 50
+#
+#rc_joins:
+#  local:
+#    per_second: 0.1
+#    burst_count: 10
+#  remote:
+#    per_second: 0.01
+#    burst_count: 10
+#
+#rc_3pid_validation:
+#  per_second: 0.003
+#  burst_count: 5
+#
+#rc_invites:
+#  per_room:
+#    per_second: 0.3
+#    burst_count: 10
+#  per_user:
+#    per_second: 0.003
+#    burst_count: 5
+
+# Ratelimiting settings for incoming federation
+#
+# The rc_federation configuration is made up of the following settings:
+#   - window_size: window size in milliseconds
+#   - sleep_limit: number of federation requests from a single server in
+#     a window before the server will delay processing the request.
+#   - sleep_delay: duration in milliseconds to delay processing events
+#     from remote servers by if they go over the sleep limit.
+#   - reject_limit: maximum number of concurrent federation requests
+#     allowed from a single server
+#   - concurrent: number of federation requests to concurrently process
+#     from a single server
+#
+# The defaults are as shown below.
+#
+#rc_federation:
+#  window_size: 1000
+#  sleep_limit: 10
+#  sleep_delay: 500
+#  reject_limit: 50
+#  concurrent: 3
+
+# Target outgoing federation transaction frequency for sending read-receipts,
+# per-room.
+#
+# If we end up trying to send out more read-receipts, they will get buffered up
+# into fewer transactions.
+#
+#federation_rr_transactions_per_room_per_second: 50
+
+
+
+## Media Store ##
+
+# Enable the media store service in the Synapse master. Uncomment the
+# following if you are using a separate media store worker.
+#
+#enable_media_repo: false
+
+# Directory where uploaded images and attachments are stored.
+#
+media_store_path: "/srv/media_store"
+
+# Media storage providers allow media to be stored in different
+# locations.
+#
+#media_storage_providers:
+#  - module: file_system
+#    # Whether to store newly uploaded local files
+#    store_local: false
+#    # Whether to store newly downloaded remote files
+#    store_remote: false
+#    # Whether to wait for successful storage for local uploads
+#    store_synchronous: false
+#    config:
+#       directory: /mnt/some/other/directory
+
+# The largest allowed upload size in bytes
+#
+#max_upload_size: 50M
+
+# Maximum number of pixels that will be thumbnailed
+#
+#max_image_pixels: 32M
+
+# Whether to generate new thumbnails on the fly to precisely match
+# the resolution requested by the client. If true then whenever
+# a new resolution is requested by the client the server will
+# generate a new thumbnail. If false the server will pick a thumbnail
+# from a precalculated list.
+#
+#dynamic_thumbnails: false
+
+# List of thumbnails to precalculate when an image is uploaded.
+#
+#thumbnail_sizes:
+#  - width: 32
+#    height: 32
+#    method: crop
+#  - width: 96
+#    height: 96
+#    method: crop
+#  - width: 320
+#    height: 240
+#    method: scale
+#  - width: 640
+#    height: 480
+#    method: scale
+#  - width: 800
+#    height: 600
+#    method: scale
+
+# Is the preview URL API enabled?
+#
+# 'false' by default: uncomment the following to enable it (and specify a
+# url_preview_ip_range_blacklist blacklist).
+#
+#url_preview_enabled: true
+
+# List of IP address CIDR ranges that the URL preview spider is denied
+# from accessing.  There are no defaults: you must explicitly
+# specify a list for URL previewing to work.  You should specify any
+# internal services in your network that you do not want synapse to try
+# to connect to, otherwise anyone in any Matrix room could cause your
+# synapse to issue arbitrary GET requests to your internal services,
+# causing serious security issues.
+#
+# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
+# listed here, since they correspond to unroutable addresses.)
+#
+# This must be specified if url_preview_enabled is set. It is recommended that
+# you uncomment the following list as a starting point.
+#
+#url_preview_ip_range_blacklist:
+#  - '127.0.0.0/8'
+#  - '10.0.0.0/8'
+#  - '172.16.0.0/12'
+#  - '192.168.0.0/16'
+#  - '100.64.0.0/10'
+#  - '192.0.0.0/24'
+#  - '169.254.0.0/16'
+#  - '192.88.99.0/24'
+#  - '198.18.0.0/15'
+#  - '192.0.2.0/24'
+#  - '198.51.100.0/24'
+#  - '203.0.113.0/24'
+#  - '224.0.0.0/4'
+#  - '::1/128'
+#  - 'fe80::/10'
+#  - 'fc00::/7'
+#  - '2001:db8::/32'
+#  - 'ff00::/8'
+#  - 'fec0::/10'
+
+# List of IP address CIDR ranges that the URL preview spider is allowed
+# to access even if they are specified in url_preview_ip_range_blacklist.
+# This is useful for specifying exceptions to wide-ranging blacklisted
+# target IP ranges - e.g. for enabling URL previews for a specific private
+# website only visible in your network.
+#
+#url_preview_ip_range_whitelist:
+#   - '192.168.1.1'
+
+# Optional list of URL matches that the URL preview spider is
+# denied from accessing.  You should use url_preview_ip_range_blacklist
+# in preference to this, otherwise someone could define a public DNS
+# entry that points to a private IP address and circumvent the blacklist.
+# This is more useful if you know there is an entire shape of URL that
+# you will never want synapse to try to spider.
+#
+# Each list entry is a dictionary of url component attributes as returned
+# by urlparse.urlsplit as applied to the absolute form of the URL.  See
+# https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
+# The values of the dictionary are treated as a filename match pattern
+# applied to that component of URLs, unless they start with a ^ in which
+# case they are treated as a regular expression match.  If all the
+# specified component matches for a given list item succeed, the URL is
+# blacklisted.
+#
+#url_preview_url_blacklist:
+#  # blacklist any URL with a username in its URI
+#  - username: '*'
+#
+#  # blacklist all *.google.com URLs
+#  - netloc: 'google.com'
+#  - netloc: '*.google.com'
+#
+#  # blacklist all plain HTTP URLs
+#  - scheme: 'http'
+#
+#  # blacklist http(s)://www.acme.com/foo
+#  - netloc: 'www.acme.com'
+#    path: '/foo'
+#
+#  # blacklist any URL with a literal IPv4 address
+#  - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'
+
+# The largest allowed URL preview spidering size in bytes
+#
+#max_spider_size: 10M
+
+# A list of values for the Accept-Language HTTP header used when
+# downloading webpages during URL preview generation. This allows
+# Synapse to specify the preferred languages that URL previews should
+# be in when communicating with remote servers.
+#
+# Each value is an IETF language tag; a 2-3 letter identifier for a
+# language, optionally followed by subtags separated by '-', specifying
+# a country or region variant.
+#
+# Multiple values can be provided, and a weight can be added to each by
+# using quality value syntax (;q=). '*' translates to any language.
+#
+# Defaults to "en".
+#
+# Example:
+#
+# url_preview_accept_language:
+#   - en-UK
+#   - en-US;q=0.9
+#   - fr;q=0.8
+#   - *;q=0.7
+#
+url_preview_accept_language:
+#   - en
+
+
+## Captcha ##
+# See docs/CAPTCHA_SETUP.md for full details of configuring this.
+
+# This homeserver's ReCAPTCHA public key. Must be specified if
+# enable_registration_captcha is enabled.
+#
+#recaptcha_public_key: "YOUR_PUBLIC_KEY"
+
+# This homeserver's ReCAPTCHA private key. Must be specified if
+# enable_registration_captcha is enabled.
+#
+#recaptcha_private_key: "YOUR_PRIVATE_KEY"
+
+# Uncomment to enable ReCaptcha checks when registering, preventing signup
+# unless a captcha is answered. Requires a valid ReCaptcha
+# public/private key. Defaults to 'false'.
+#
+#enable_registration_captcha: true
+
+# The API endpoint to use for verifying m.login.recaptcha responses.
+# Defaults to "https://www.recaptcha.net/recaptcha/api/siteverify".
+#
+#recaptcha_siteverify_api: "https://my.recaptcha.site"
+
+
+## TURN ##
+
+# The public URIs of the TURN server to give to clients
+#
+#turn_uris: []
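+#
+# For illustration (the hostname and port here are placeholders, not part of
+# any default), a typical coturn deployment might be listed as:
+#
+#turn_uris: [ "turn:turn.example.com:3478?transport=udp", "turn:turn.example.com:3478?transport=tcp" ]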
+
+# The shared secret used to compute passwords for the TURN server
+#
+#turn_shared_secret: "YOUR_SHARED_SECRET"
+
+# The username and password if the TURN server needs them and
+# does not use a token
+#
+#turn_username: "TURNSERVER_USERNAME"
+#turn_password: "TURNSERVER_PASSWORD"
+
+# How long generated TURN credentials last
+#
+#turn_user_lifetime: 1h
+
+# Whether guests should be allowed to use the TURN server.
+# This defaults to True, otherwise VoIP will be unreliable for guests.
+# However, it does introduce a slight security risk as it allows users to
+# connect to arbitrary endpoints without having first signed up for a
+# valid account (e.g. by passing a CAPTCHA).
+#
+#turn_allow_guests: true
+
+
+## Registration ##
+#
+# Registration can be rate-limited using the parameters in the "Ratelimiting"
+# section of this file.
+
+# Enable registration for new users.
+#
+#enable_registration: false
+
+# Time that a user's session remains valid for, after they log in.
+#
+# Note that this is not currently compatible with guest logins.
+#
+# Note also that this is calculated at login time: changes are not applied
+# retrospectively to users who have already logged in.
+#
+# By default, this is infinite.
+#
+#session_lifetime: 24h
+
+# The user must provide all of the below types of 3PID when registering.
+#
+#registrations_require_3pid:
+#  - email
+#  - msisdn
+
+# Explicitly disable asking for MSISDNs from the registration
+# flow (overrides registrations_require_3pid if MSISDNs are set as required)
+#
+#disable_msisdn_registration: true
+
+# Mandate that users are only allowed to associate certain formats of
+# 3PIDs with accounts on this server.
+#
+#allowed_local_3pids:
+#  - medium: email
+#    pattern: '^[^@]+@matrix\.org$'
+#  - medium: email
+#    pattern: '^[^@]+@vector\.im$'
+#  - medium: msisdn
+#    pattern: '\+44'
+
+# Enable 3PIDs lookup requests to identity servers from this server.
+#
+#enable_3pid_lookup: true
+
+# If set, allows registration of standard or admin accounts by anyone who
+# has the shared secret, even if registration is otherwise disabled.
+#
+registration_shared_secret: "P_P.1Rn*Uh;2fOwJto=+FRixCws.i-k2uHle#Fhk-JBPXYS5_v"
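+#
+# With the shared secret above, accounts can typically be created out-of-band
+# using the `register_new_matrix_user` script that ships with Synapse. An
+# illustrative invocation (adjust the URL to wherever this homeserver's client
+# API is listening):
+#
+#   register_new_matrix_user -c homeserver.yaml http://localhost:8008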
+
+# Set the number of bcrypt rounds used to generate password hash.
+# Larger numbers increase the work factor needed to generate the hash.
+# The default number is 12 (which equates to 2^12 rounds).
+# N.B. that increasing this will exponentially increase the time required
+# to register or login - e.g. 24 => 2^24 rounds which will take >20 mins.
+#
+#bcrypt_rounds: 12
+
+# Allows users to register as guests without a password/email/etc, and
+# participate in rooms hosted on this server which have been made
+# accessible to anonymous users.
+#
+#allow_guest_access: false
+
+# The identity server which we suggest that clients should use when users log
+# in on this server.
+#
+# (By default, no suggestion is made, so it is left up to the client.
+# This setting is ignored unless public_baseurl is also set.)
+#
+#default_identity_server: https://matrix.org
+
+# Handle threepid (email/phone etc) registration and password resets through a set of
+# *trusted* identity servers. Note that this allows the configured identity server to
+# reset passwords for accounts!
+#
+# Be aware that if `email` is not set, and SMTP options have not been
+# configured in the email config block, registration and user password resets via
+# email will be globally disabled.
+#
+# Additionally, if `msisdn` is not set, registration and password resets via msisdn
+# will be disabled regardless, and users will not be able to associate an msisdn
+# identifier to their account. This is due to Synapse currently not supporting
+# any method of sending SMS messages on its own.
+#
+# To enable using an identity server for operations regarding a particular third-party
+# identifier type, set the value to the URL of that identity server as shown in the
+# examples below.
+#
+# Servers handling these requests must answer the `/requestToken` endpoints defined
+# by the Matrix Identity Service API specification:
+# https://matrix.org/docs/spec/identity_service/latest
+#
+# If a delegate is specified, the config option public_baseurl must also be filled out.
+#
+account_threepid_delegates:
+    #email: https://example.com     # Delegate email sending to example.com
+    #msisdn: http://localhost:8090  # Delegate SMS sending to this local process
+
+# Whether users are allowed to change their displayname after it has
+# been initially set. Useful when provisioning users based on the
+# contents of a third-party directory.
+#
+# Does not apply to server administrators. Defaults to 'true'
+#
+#enable_set_displayname: false
+
+# Whether users are allowed to change their avatar after it has been
+# initially set. Useful when provisioning users based on the contents
+# of a third-party directory.
+#
+# Does not apply to server administrators. Defaults to 'true'
+#
+#enable_set_avatar_url: false
+
+# Whether users can change the 3PIDs associated with their accounts
+# (email address and msisdn).
+#
+# Defaults to 'true'
+#
+#enable_3pid_changes: false
+
+# Users who register on this homeserver will automatically be joined
+# to these rooms.
+#
+# By default, any room aliases included in this list will be created
+# as a publicly joinable room when the first user registers for the
+# homeserver. This behaviour can be customised with the settings below.
+# If the room already exists, make certain it is a publicly joinable
+# room. The join rule of the room must be set to 'public'.
+#
+#auto_join_rooms:
+#  - "#example:example.com"
+
+# Where auto_join_rooms are specified, setting this flag ensures that
+# the rooms exist by creating them when the first user on the
+# homeserver registers.
+#
+# By default the auto-created rooms are publicly joinable from any federated
+# server. Use the autocreate_auto_join_rooms_federated and
+# autocreate_auto_join_room_preset settings below to customise this behaviour.
+#
+# Setting to false means that if the rooms are not manually created,
+# users cannot be auto-joined since they do not exist.
+#
+# Defaults to true. Uncomment the following line to disable automatically
+# creating auto-join rooms.
+#
+#autocreate_auto_join_rooms: false
+
+# Whether the auto_join_rooms that are auto-created are available via
+# federation. Only has an effect if autocreate_auto_join_rooms is true.
+#
+# Note that whether a room is federated cannot be modified after
+# creation.
+#
+# Defaults to true: the room will be joinable from other servers.
+# Uncomment the following to prevent users from other homeservers from
+# joining these rooms.
+#
+#autocreate_auto_join_rooms_federated: false
+
+# The room preset to use when auto-creating one of auto_join_rooms. Only has an
+# effect if autocreate_auto_join_rooms is true.
+#
+# This can be one of "public_chat", "private_chat", or "trusted_private_chat".
+# If a value of "private_chat" or "trusted_private_chat" is used then
+# auto_join_mxid_localpart must also be configured.
+#
+# Defaults to "public_chat", meaning that the room is joinable by anyone, including
+# federated servers if autocreate_auto_join_rooms_federated is true (the default).
+# Uncomment the following to require an invitation to join these rooms.
+#
+#autocreate_auto_join_room_preset: private_chat
+
+# The local part of the user id which is used to create auto_join_rooms if
+# autocreate_auto_join_rooms is true. If this is not provided then the
+# initial user account that registers will be used to create the rooms.
+#
+# The user id is also used to invite new users to any auto-join rooms which
+# are set to invite-only.
+#
+# It *must* be configured if autocreate_auto_join_room_preset is set to
+# "private_chat" or "trusted_private_chat".
+#
+# Note that this must be specified in order for new users to be correctly
+# invited to any auto-join rooms which have been set to invite-only (either
+# at the time of creation or subsequently).
+#
+# Note that, if the room already exists, this user must be joined and
+# have the appropriate permissions to invite new members.
+#
+#auto_join_mxid_localpart: system
+
+# When auto_join_rooms is specified, setting this flag to false prevents
+# guest accounts from being automatically joined to the rooms.
+#
+# Defaults to true.
+#
+#auto_join_rooms_for_guests: false
+
+
+## Account Validity ##
+
+# Optional account validity configuration. This allows for accounts to be denied
+# any request after a given period.
+#
+# Once this feature is enabled, Synapse will look for registered users without an
+# expiration date at startup and will add one to every account it finds, using
+# the settings in force at that time.
+# This means that expiration dates are only derived from the validity period that
+# applied when they were first assigned: if the validity period is later changed
+# and Synapse restarted, existing expiration dates are not updated unless the
+# accounts are manually renewed. Each expiration date is selected at random within
+# the range [now + period - d ; now + period], where d is equal to 10% of the
+# validity period.
+#
+account_validity:
+  # The account validity feature is disabled by default. Uncomment the
+  # following line to enable it.
+  #
+  #enabled: true
+
+  # The period after which an account is valid after its registration. When
+  # renewing the account, its validity period will be extended by this amount
+  # of time. This parameter is required when using the account validity
+  # feature.
+  #
+  #period: 6w
+
+  # The amount of time before an account's expiry date at which Synapse will
+  # send an email to the account's email address with a renewal link. By
+  # default, no such emails are sent.
+  #
+  # If you enable this setting, you will also need to fill out the 'email' and
+  # 'public_baseurl' configuration sections.
+  #
+  #renew_at: 1w
+
+  # The subject of the email sent out with the renewal link. '%(app)s' can be
+  # used as a placeholder for the 'app_name' parameter from the 'email'
+  # section.
+  #
+  # Note that the placeholder must be written '%(app)s', including the
+  # trailing 's'.
+  #
+  # If this is not set, a default value is used.
+  #
+  #renew_email_subject: "Renew your %(app)s account"
+
+  # Directory in which Synapse will try to find templates for the HTML files to
+  # serve to the user when trying to renew an account. If not set, default
+  # templates from within the Synapse package will be used.
+  #
+  # The currently available templates are:
+  #
+  # * account_renewed.html: Displayed to the user after they have successfully
+  #       renewed their account.
+  #
+  # * account_previously_renewed.html: Displayed to the user if they attempt to
+  #       renew their account with a token that is valid, but that has already
+  #       been used. In this case the account is not renewed again.
+  #
+  # * invalid_token.html: Displayed to the user when they try to renew an account
+  #       with an unknown or invalid renewal token.
+  #
+  # See https://github.com/matrix-org/synapse/tree/master/synapse/res/templates for
+  # default template contents.
+  #
+  # The file name of some of these templates can be configured below for legacy
+  # reasons.
+  #
+  #template_dir: "res/templates"
+
+  # A custom file name for the 'account_renewed.html' template.
+  #
+  # If not set, the file is assumed to be named "account_renewed.html".
+  #
+  #account_renewed_html_path: "account_renewed.html"
+
+  # A custom file name for the 'invalid_token.html' template.
+  #
+  # If not set, the file is assumed to be named "invalid_token.html".
+  #
+  #invalid_token_html_path: "invalid_token.html"
+
+
+## Metrics ##
+
+# Enable collection and rendering of performance metrics
+#
+#enable_metrics: false
+
+# Enable sentry integration
+# NOTE: While attempts are made to ensure that the logs don't contain
+# any sensitive information, this cannot be guaranteed. By enabling
+# this option the sentry server may therefore receive sensitive
+# information, and it in turn may then disseminate sensitive information
+# through insecure notification channels if so configured.
+#
+#sentry:
+#    dsn: "..."
+
+# Flags to enable Prometheus metrics which are not suitable to be
+# enabled by default, either for performance reasons or limited use.
+#
+metrics_flags:
+    # Publish synapse_federation_known_servers, a gauge of the number of
+    # servers this homeserver knows about, including itself. May cause
+    # performance problems on large homeservers.
+    #
+    #known_servers: true
+
+# Whether or not to report anonymized homeserver usage statistics.
+#
+report_stats: false
+
+# The endpoint to report the anonymized homeserver usage statistics to.
+# Defaults to https://matrix.org/report-usage-stats/push
+#
+#report_stats_endpoint: https://example.com/report-usage-stats/push
+
+
+## API Configuration ##
+
+# Controls for the state that is shared with users who receive an invite
+# to a room
+#
+room_prejoin_state:
+   # By default, the following state event types are shared with users who
+   # receive invites to the room:
+   #
+   # - m.room.join_rules
+   # - m.room.canonical_alias
+   # - m.room.avatar
+   # - m.room.encryption
+   # - m.room.name
+   # - m.room.create
+   #
+   # Uncomment the following to disable these defaults (so that only the event
+   # types listed in 'additional_event_types' are shared). Defaults to 'false'.
+   #
+   #disable_default_event_types: true
+
+   # Additional state event types to share with users when they are invited
+   # to a room.
+   #
+   # By default, this list is empty (so only the default event types are shared).
+   #
+   #additional_event_types:
+   #  - org.example.custom.event.type
+
+
+# A list of application service config files to use
+#
+#app_service_config_files:
+#  - app_service_1.yaml
+#  - app_service_2.yaml
+
+# Uncomment to enable tracking of application service IP addresses. Implicitly
+# enables MAU tracking for application service users.
+#
+#track_appservice_user_ips: true
+
+
+# a secret which is used to sign access tokens. If none is specified,
+# the registration_shared_secret is used, if one is given; otherwise,
+# a secret key is derived from the signing key.
+#
+macaroon_secret_key: "#fcFaqplV,^c5:CmYAfKdEGcHqZ7YKSuS&Gq0DFw3BO@crX;pr"
+
+# a secret which is used to calculate HMACs for form values, to stop
+# falsification of values. Must be specified for the User Consent
+# forms to work.
+#
+form_secret: "yLW&wmKAN+7IOFfRkS5MnSOuHa2ur7&N~NJOqe46PRwiI*s.#_"
+
+## Signing Keys ##
+
+# Path to the signing key to sign messages with
+#
+signing_key_path: "/srv/synapse.signing.key"
+
+# The keys that the server used to sign messages with but won't use
+# to sign new messages.
+#
+old_signing_keys:
+  # For each key, `key` should be the base64-encoded public key, and
+  # `expired_ts` should be the time (in milliseconds since the unix epoch) that
+  # it was last used.
+  #
+  # It is possible to build an entry from an old signing.key file using the
+  # `export_signing_key` script which is provided with synapse.
+  #
+  # For example:
+  #
+  #"ed25519:id": { key: "base64string", expired_ts: 123456789123 }
+
+# How long a key response published by this server is valid for.
+# Used to set the valid_until_ts in /key/v2 APIs.
+# Determines how quickly servers will query to check which keys
+# are still valid.
+#
+#key_refresh_interval: 1d
+
+# The trusted servers to download signing keys from.
+#
+# When we need to fetch a signing key, each server is tried in parallel.
+#
+# Normally, the connection to the key server is validated via TLS certificates.
+# Additional security can be provided by configuring a `verify key`, which
+# will make synapse check that the response is signed by that key.
+#
+# This setting supersedes an older setting named `perspectives`. The old format
+# is still supported for backwards-compatibility, but it is deprecated.
+#
+# 'trusted_key_servers' defaults to matrix.org, but using it will generate a
+# warning on start-up. To suppress this warning, set
+# 'suppress_key_server_warning' to true.
+#
+# Options for each entry in the list include:
+#
+#    server_name: the name of the server. required.
+#
+#    verify_keys: an optional map from key id to base64-encoded public key.
+#       If specified, we will check that the response is signed by at least
+#       one of the given keys.
+#
+#    accept_keys_insecurely: a boolean. Normally, if `verify_keys` is unset,
+#       and federation_verify_certificates is not `true`, synapse will refuse
+#       to start, because this would allow anyone who can spoof DNS responses
+#       to masquerade as the trusted key server. If you know what you are doing
+#       and are sure that your network environment provides a secure connection
+#       to the key server, you can set this to `true` to override this
+#       behaviour.
+#
+# An example configuration might look like:
+#
+#trusted_key_servers:
+#  - server_name: "my_trusted_server.example.com"
+#    verify_keys:
+#      "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr"
+#  - server_name: "my_other_trusted_server.example.com"
+#
+trusted_key_servers:
+  - server_name: "matrix.org"
+
+# Uncomment the following to disable the warning that is emitted when the
+# trusted_key_servers include 'matrix.org'. See above.
+#
+suppress_key_server_warning: true
+
+# The signing keys to use when acting as a trusted key server. If not specified
+# defaults to the server signing key.
+#
+# Can contain multiple keys, one per line.
+#
+#key_server_signing_keys_path: "key_server_signing_keys.key"
+
+
+## Single sign-on integration ##
+
+# The following settings can be used to make Synapse use a single sign-on
+# provider for authentication, instead of its internal password database.
+#
+# You will probably also want to set the following options to `false` to
+# disable the regular login/registration flows:
+#   * enable_registration
+#   * password_config.enabled
+#
+# You will also want to investigate the settings under the "sso" configuration
+# section below.
+
+# Enable SAML2 for registration and login. Uses pysaml2.
+#
+# At least one of `sp_config` or `config_path` must be set in this section to
+# enable SAML login.
+#
+# Once SAML support is enabled, a metadata file will be exposed at
+# https://<server>:<port>/_synapse/client/saml2/metadata.xml, which you may be able to
+# use to configure your SAML IdP with. Alternatively, you can manually configure
+# the IdP to use an ACS location of
+# https://<server>:<port>/_synapse/client/saml2/authn_response.
+#
+saml2_config:
+  # `sp_config` is the configuration for the pysaml2 Service Provider.
+  # See pysaml2 docs for format of config.
+  #
+  # Default values will be used for the 'entityid' and 'service' settings,
+  # so it is not normally necessary to specify them unless you need to
+  # override them.
+  #
+  sp_config:
+    # Point this to the IdP's metadata. You must provide either a local
+    # file via the `local` attribute or (preferably) a URL via the
+    # `remote` attribute.
+    #
+    #metadata:
+    #  local: ["saml2/idp.xml"]
+    #  remote:
+    #    - url: https://our_idp/metadata.xml
+
+    # Allowed clock difference in seconds between the homeserver and IdP.
+    #
+    # Uncomment the below to increase the accepted time difference from 0 to 3 seconds.
+    #
+    #accepted_time_diff: 3
+
+    # By default, the user has to go to our login page first. If you'd like
+    # to allow IdP-initiated login, set 'allow_unsolicited: true' in a
+    # 'service.sp' section:
+    #
+    #service:
+    #  sp:
+    #    allow_unsolicited: true
+
+    # The examples below are just used to generate our metadata xml, and you
+    # may well not need them, depending on your setup. Alternatively you
+    # may need a whole lot more detail - see the pysaml2 docs!
+
+    #description: ["My awesome SP", "en"]
+    #name: ["Test SP", "en"]
+
+    #ui_info:
+    #  display_name:
+    #    - lang: en
+    #      text: "Display Name is the descriptive name of your service."
+    #  description:
+    #    - lang: en
+    #      text: "Description should be a short paragraph explaining the purpose of the service."
+    #  information_url:
+    #    - lang: en
+    #      text: "https://example.com/terms-of-service"
+    #  privacy_statement_url:
+    #    - lang: en
+    #      text: "https://example.com/privacy-policy"
+    #  keywords:
+    #    - lang: en
+    #      text: ["Matrix", "Element"]
+    #  logo:
+    #    - lang: en
+    #      text: "https://example.com/logo.svg"
+    #      width: "200"
+    #      height: "80"
+
+    #organization:
+    #  name: Example com
+    #  display_name:
+    #    - ["Example co", "en"]
+    #  url: "http://example.com"
+
+    #contact_person:
+    #  - given_name: Bob
+    #    sur_name: "the Sysadmin"
+    #    email_address": ["admin@example.com"]
+    #    contact_type": technical
+
+  # Instead of putting the config inline as above, you can specify a
+  # separate pysaml2 configuration file:
+  #
+  #config_path: "/data/sp_conf.py"
+
+  # The lifetime of a SAML session. This defines how long a user has to
+  # complete the authentication process, if allow_unsolicited is unset.
+  # The default is 15 minutes.
+  #
+  #saml_session_lifetime: 5m
+
+  # An external module can be provided here as a custom solution to
+  # mapping attributes returned from a saml provider onto a matrix user.
+  #
+  user_mapping_provider:
+    # The custom module's class. Uncomment to use a custom module.
+    #
+    #module: mapping_provider.SamlMappingProvider
+
+    # Custom configuration values for the module. The options below are
+    # intended for the built-in provider and should be changed if
+    # using a custom module. This section will be passed as a Python
+    # dictionary to the module's `parse_config` method.
+    #
+    config:
+      # The SAML attribute (after mapping via the attribute maps) to use
+      # to derive the Matrix ID from. 'uid' by default.
+      #
+      # Note: This used to be configured by the
+      # saml2_config.mxid_source_attribute option. If that is still
+      # defined, its value will be used instead.
+      #
+      #mxid_source_attribute: displayName
+
+      # The mapping system to use for mapping the saml attribute onto a
+      # matrix ID.
+      #
+      # Options include:
+      #  * 'hexencode' (which maps unpermitted characters to '=xx')
+      #  * 'dotreplace' (which replaces unpermitted characters with
+      #     '.').
+      # The default is 'hexencode'.
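+      #
+      # As a rough illustration of the two schemes, using a hypothetical
+      # attribute value: "jsmith@example.com" would map to
+      # "jsmith=40example.com" under 'hexencode' ('@' is 0x40), or to
+      # "jsmith.example.com" under 'dotreplace'.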
+      #
+      # Note: This used to be configured by the
+      # saml2_config.mxid_mapping option. If that is still defined, its
+      # value will be used instead.
+      #
+      #mxid_mapping: dotreplace
+
+  # In previous versions of synapse, the mapping from SAML attribute to
+  # MXID was always calculated dynamically rather than stored in a
+  # table. For backwards-compatibility, we will look for user_ids
+  # matching such a pattern before creating a new account.
+  #
+  # This setting controls the SAML attribute which will be used for this
+  # backwards-compatibility lookup. Typically it should be 'uid', but if
+  # the attribute maps are changed, it may be necessary to change it.
+  #
+  # The default is 'uid'.
+  #
+  #grandfathered_mxid_source_attribute: upn
+
+  # It is possible to configure Synapse to only allow logins if SAML attributes
+  # match particular values. The requirements can be listed under
+  # `attribute_requirements` as shown below. All of the listed attributes must
+  # match for the login to be permitted.
+  #
+  #attribute_requirements:
+  #  - attribute: userGroup
+  #    value: "staff"
+  #  - attribute: department
+  #    value: "sales"
+
+  # If the metadata XML contains multiple IdP entities then the `idp_entityid`
+  # option must be set to the entity to redirect users to.
+  #
+  # Most deployments only have a single IdP entity and so should omit this
+  # option.
+  #
+  #idp_entityid: 'https://our_idp/entityid'
+
+
+# List of OpenID Connect (OIDC) / OAuth 2.0 identity providers, for registration
+# and login.
+#
+# Options for each entry include:
+#
+#   idp_id: a unique identifier for this identity provider. Used internally
+#       by Synapse; should be a single word such as 'github'.
+#
+#       Note that, if this is changed, users authenticating via that provider
+#       will no longer be recognised as the same user!
+#
+#       (Use "oidc" here if you are migrating from an old "oidc_config"
+#       configuration.)
+#
+#   idp_name: A user-facing name for this identity provider, which is used to
+#       offer the user a choice of login mechanisms.
+#
+#   idp_icon: An optional icon for this identity provider, which is presented
+#       by clients and Synapse's own IdP picker page. If given, must be an
+#       MXC URI of the format mxc://<server-name>/<media-id>. (An easy way to
+#       obtain such an MXC URI is to upload an image to an (unencrypted) room
+#       and then copy the "url" from the source of the event.)
+#
+#   idp_brand: An optional brand for this identity provider, allowing clients
+#       to style the login flow according to the identity provider in question.
+#       See the spec for possible options here.
+#
+#   discover: set to 'false' to disable the use of the OIDC discovery mechanism
+#       to discover endpoints. Defaults to true.
+#
+#   issuer: Required. The OIDC issuer. Used to validate tokens and (if discovery
+#       is enabled) to discover the provider's endpoints.
+#
+#   client_id: Required. oauth2 client id to use.
+#
+#   client_secret: oauth2 client secret to use. May be omitted if
+#        client_secret_jwt_key is given, or if client_auth_method is 'none'.
+#
+#   client_secret_jwt_key: Alternative to client_secret: details of a key used
+#      to create a JSON Web Token to be used as an OAuth2 client secret. If
+#      given, must be a dictionary with the following properties:
+#
+#          key: a pem-encoded signing key. Must be a suitable key for the
+#              algorithm specified. Required unless 'key_file' is given.
+#
+#          key_file: the path to a file containing a pem-encoded signing key.
+#              Required unless 'key' is given.
+#
+#          jwt_header: a dictionary giving properties to include in the JWT
+#              header. Must include the key 'alg', giving the algorithm used to
+#              sign the JWT, such as "ES256", using the JWA identifiers in
+#              RFC7518.
+#
+#          jwt_payload: an optional dictionary giving properties to include in
+#              the JWT payload. Normally this should include an 'iss' key.
+#
+#   client_auth_method: auth method to use when exchanging the token. Valid
+#       values are 'client_secret_basic' (default), 'client_secret_post' and
+#       'none'.
+#
+#   scopes: list of scopes to request. This should normally include the "openid"
+#       scope. Defaults to ["openid"].
+#
+#   authorization_endpoint: the oauth2 authorization endpoint. Required if
+#       provider discovery is disabled.
+#
+#   token_endpoint: the oauth2 token endpoint. Required if provider discovery is
+#       disabled.
+#
+#   userinfo_endpoint: the OIDC userinfo endpoint. Required if discovery is
+#       disabled and the 'openid' scope is not requested.
+#
+#   jwks_uri: URI where to fetch the JWKS. Required if discovery is disabled and
+#       the 'openid' scope is used.
+#
+#   skip_verification: set to 'true' to skip metadata verification. Use this if
+#       you are connecting to a provider that is not OpenID Connect compliant.
+#       Defaults to false. Avoid this in production.
+#
+#   user_profile_method: Whether to fetch the user profile from the userinfo
+#       endpoint. Valid values are: 'auto' or 'userinfo_endpoint'.
+#
+#       Defaults to 'auto', which fetches the userinfo endpoint if 'openid' is
+#       included in 'scopes'. Set to 'userinfo_endpoint' to always fetch the
+#       userinfo endpoint.
+#
+#   allow_existing_users: set to 'true' to allow a user logging in via OIDC to
+#       match a pre-existing account instead of failing. This could be used if
+#       switching from password logins to OIDC. Defaults to false.
+#
+#   user_mapping_provider: Configuration for how attributes returned from an OIDC
+#       provider are mapped onto a matrix user. This setting has the following
+#       sub-properties:
+#
+#       module: The class name of a custom mapping module. Default is
+#           'synapse.handlers.oidc.JinjaOidcMappingProvider'.
+#           See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
+#           for information on implementing a custom mapping provider.
+#
+#       config: Configuration for the mapping provider module. This section will
+#           be passed as a Python dictionary to the user mapping provider
+#           module's `parse_config` method.
+#
+#           For the default provider, the following settings are available:
+#
+#             subject_claim: name of the claim containing a unique identifier
+#                 for the user. Defaults to 'sub', which OpenID Connect
+#                 compliant providers should provide.
+#
+#             localpart_template: Jinja2 template for the localpart of the MXID.
+#                 If this is not set, the user will be prompted to choose their
+#                 own username (see 'sso_auth_account_details.html' in the 'sso'
+#                 section of this file).
+#
+#             display_name_template: Jinja2 template for the display name to set
+#                 on first login. If unset, no displayname will be set.
+#
+#             email_template: Jinja2 template for the email address of the user.
+#                 If unset, no email address will be added to the account.
+#
+#             extra_attributes: a map of Jinja2 templates for extra attributes
+#                 to send back to the client during login.
+#                 Note that these are non-standard and clients will ignore them
+#                 without modifications.
+#
+#           When rendering, the Jinja2 templates are given a 'user' variable,
+#           which is set to the claims returned by the UserInfo Endpoint and/or
+#           in the ID Token.
+#
+#   It is possible to configure Synapse to only allow logins if certain attributes
+#   match particular values in the OIDC userinfo. The requirements can be listed under
+#   `attribute_requirements` as shown below. All of the listed attributes must
+#   match for the login to be permitted. Additional attributes can be added to
+#   userinfo by expanding the `scopes` section of the OIDC config to retrieve
+#   additional information from the OIDC provider.
+#
+#   If the OIDC claim is a list, then the attribute must match any value in the list.
+#   Otherwise, it must exactly match the value of the claim. Using the example
+#   below, the `family_name` claim MUST be "Stephensson", but the `groups`
+#   claim MUST contain "admin".
+#
+#   attribute_requirements:
+#     - attribute: family_name
+#       value: "Stephensson"
+#     - attribute: groups
+#       value: "admin"
+#
+# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
+# for information on how to configure these options.
+#
+# For backwards compatibility, it is also possible to configure a single OIDC
+# provider via an 'oidc_config' setting. This is now deprecated and admins are
+# advised to migrate to the 'oidc_providers' format. (When doing that migration,
+# use 'oidc' for the idp_id to ensure that existing users continue to be
+# recognised.)
+#
+oidc_providers:
+  # Generic example
+  #
+  #- idp_id: my_idp
+  #  idp_name: "My OpenID provider"
+  #  idp_icon: "mxc://example.com/mediaid"
+  #  discover: false
+  #  issuer: "https://accounts.example.com/"
+  #  client_id: "provided-by-your-issuer"
+  #  client_secret: "provided-by-your-issuer"
+  #  client_auth_method: client_secret_post
+  #  scopes: ["openid", "profile"]
+  #  authorization_endpoint: "https://accounts.example.com/oauth2/auth"
+  #  token_endpoint: "https://accounts.example.com/oauth2/token"
+  #  userinfo_endpoint: "https://accounts.example.com/userinfo"
+  #  jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
+  #  skip_verification: true
+  #  user_mapping_provider:
+  #    config:
+  #      subject_claim: "id"
+  #      localpart_template: "{{ user.login }}"
+  #      display_name_template: "{{ user.name }}"
+  #      email_template: "{{ user.email }}"
+  #  attribute_requirements:
+  #    - attribute: userGroup
+  #      value: "synapseUsers"
+
+
+# Enable Central Authentication Service (CAS) for registration and login.
+#
+cas_config:
+  # Uncomment the following to enable authorization against a CAS server.
+  # Defaults to false.
+  #
+  #enabled: true
+
+  # The URL of the CAS authorization endpoint.
+  #
+  #server_url: "https://cas-server.com"
+
+  # The attribute of the CAS response to use as the display name.
+  #
+  # If unset, no displayname will be set.
+  #
+  #displayname_attribute: name
+
+  # It is possible to configure Synapse to only allow logins if CAS attributes
+  # match particular values. All of the keys in the mapping below must exist
+  # and the values must match the given value. Alternately if the given value
+  # is None then any value is allowed (the attribute just must exist).
+  # All of the listed attributes must match for the login to be permitted.
+  #
+  #required_attributes:
+  #  userGroup: "staff"
+  #  department: None
+
+
+# Additional settings to use with single-sign on systems such as OpenID Connect,
+# SAML2 and CAS.
+#
+sso:
+    # A list of client URLs which are whitelisted so that the user does not
+    # have to confirm giving access to their account to the URL. Any client
+    # whose URL starts with an entry in the following list will not be subject
+    # to an additional confirmation step after the SSO login is completed.
+    #
+    # WARNING: An entry such as "https://my.client" is insecure, because it
+    # will also match "https://my.client.evil.site", exposing your users to
+    # phishing attacks from evil.site. To avoid this, include a slash after the
+    # hostname: "https://my.client/".
+    #
+    # If public_baseurl is set, then the login fallback page (used by clients
+    # that don't natively support the required login flows) is whitelisted in
+    # addition to any URLs in this list.
+    #
+    # By default, this list is empty.
+    #
+    #client_whitelist:
+    #  - https://riot.im/develop
+    #  - https://my.custom.client/
+
+    # Directory in which Synapse will try to find the template files below.
+    # If not set, or the files named below are not found within the template
+    # directory, default templates from within the Synapse package will be used.
+    #
+    # Synapse will look for the following templates in this directory:
+    #
+    # * HTML page to prompt the user to choose an Identity Provider during
+    #   login: 'sso_login_idp_picker.html'.
+    #
+    #   This is only used if multiple SSO Identity Providers are configured.
+    #
+    #   When rendering, this template is given the following variables:
+    #     * redirect_url: the URL that the user will be redirected to after
+    #       login.
+    #
+    #     * server_name: the homeserver's name.
+    #
+    #     * providers: a list of available Identity Providers. Each element is
+    #       an object with the following attributes:
+    #
+    #         * idp_id: unique identifier for the IdP
+    #         * idp_name: user-facing name for the IdP
+    #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+    #              for the IdP
+    #         * idp_brand: if specified in the IdP config, a textual identifier
+    #              for the brand of the IdP
+    #
+    #   The rendered HTML page should contain a form which submits its results
+    #   back as a GET request, with the following query parameters:
+    #
+    #     * redirectUrl: the client redirect URI (ie, the `redirect_url` passed
+    #       to the template)
+    #
+    #     * idp: the 'idp_id' of the chosen IDP.
+    #
+    # * HTML page to prompt new users to enter a userid and confirm other
+    #   details: 'sso_auth_account_details.html'. This is only shown if the
+    #   SSO implementation (with any user_mapping_provider) does not return
+    #   a localpart.
+    #
+    #   When rendering, this template is given the following variables:
+    #
+    #     * server_name: the homeserver's name.
+    #
+    #     * idp: details of the SSO Identity Provider that the user logged in
+    #       with: an object with the following attributes:
+    #
+    #         * idp_id: unique identifier for the IdP
+    #         * idp_name: user-facing name for the IdP
+    #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+    #              for the IdP
+    #         * idp_brand: if specified in the IdP config, a textual identifier
+    #              for the brand of the IdP
+    #
+    #     * user_attributes: an object containing details about the user that
+    #       we received from the IdP. May have the following attributes:
+    #
+    #         * display_name: the user's display_name
+    #         * emails: a list of email addresses
+    #
+    #   The template should render a form which submits the following fields:
+    #
+    #     * username: the localpart of the user's chosen user id
+    #
+    # * HTML page allowing the user to consent to the server's terms and
+    #   conditions. This is only shown for new users, and only if
+    #   `user_consent.require_at_registration` is set.
+    #
+    #   When rendering, this template is given the following variables:
+    #
+    #     * server_name: the homeserver's name.
+    #
+    #     * user_id: the user's proposed matrix ID.
+    #
+    #     * user_profile.display_name: the user's proposed display name, if any.
+    #
+    #     * consent_version: the version of the terms that the user will be
+    #       shown
+    #
+    #     * terms_url: a link to the page showing the terms.
+    #
+    #   The template should render a form which submits the following fields:
+    #
+    #     * accepted_version: the version of the terms accepted by the user
+    #       (ie, 'consent_version' from the input variables).
+    #
+    # * HTML page for a confirmation step before redirecting back to the client
+    #   with the login token: 'sso_redirect_confirm.html'.
+    #
+    #   When rendering, this template is given the following variables:
+    #
+    #     * redirect_url: the URL the user is about to be redirected to.
+    #
+    #     * display_url: the same as `redirect_url`, but with the query
+    #                    parameters stripped. The intention is to have a
+    #                    human-readable URL to show to users, not to use it as
+    #                    the final address to redirect to.
+    #
+    #     * server_name: the homeserver's name.
+    #
+    #     * new_user: a boolean indicating whether this is the user's first time
+    #          logging in.
+    #
+    #     * user_id: the user's matrix ID.
+    #
+    #     * user_profile.avatar_url: an MXC URI for the user's avatar, if any.
+    #           None if the user has not set an avatar.
+    #
+    #     * user_profile.display_name: the user's display name. None if the user
+    #           has not set a display name.
+    #
+    # * HTML page which notifies the user that they are authenticating to confirm
+    #   an operation on their account during the user interactive authentication
+    #   process: 'sso_auth_confirm.html'.
+    #
+    #   When rendering, this template is given the following variables:
+    #     * redirect_url: the URL the user is about to be redirected to.
+    #
+    #     * description: the operation which the user is being asked to confirm
+    #
+    #     * idp: details of the Identity Provider that we will use to confirm
+    #       the user's identity: an object with the following attributes:
+    #
+    #         * idp_id: unique identifier for the IdP
+    #         * idp_name: user-facing name for the IdP
+    #         * idp_icon: if specified in the IdP config, an MXC URI for an icon
+    #              for the IdP
+    #         * idp_brand: if specified in the IdP config, a textual identifier
+    #              for the brand of the IdP
+    #
+    # * HTML page shown after a successful user interactive authentication session:
+    #   'sso_auth_success.html'.
+    #
+    #   Note that this page must include the JavaScript which notifies of a successful authentication
+    #   (see https://matrix.org/docs/spec/client_server/r0.6.0#fallback).
+    #
+    #   This template has no additional variables.
+    #
+    # * HTML page shown after a user-interactive authentication session which
+    #   does not map correctly onto the expected user: 'sso_auth_bad_user.html'.
+    #
+    #   When rendering, this template is given the following variables:
+    #     * server_name: the homeserver's name.
+    #     * user_id_to_verify: the MXID of the user that we are trying to
+    #       validate.
+    #
+    # * HTML page shown during single sign-on if a deactivated user (according to Synapse's database)
+    #   attempts to log in: 'sso_account_deactivated.html'.
+    #
+    #   This template has no additional variables.
+    #
+    # * HTML page to display to users if something goes wrong during the
+    #   OpenID Connect authentication process: 'sso_error.html'.
+    #
+    #   When rendering, this template is given two variables:
+    #     * error: the technical name of the error
+    #     * error_description: a human-readable message for the error
+    #
+    # You can see the default templates at:
+    # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+    #
+    #template_dir: "res/templates"
+
+
+# JSON web token integration. The following settings can be used to make
+# Synapse use JSON web tokens for authentication, instead of its internal
+# password database.
+#
+# Each JSON Web Token needs to contain a "sub" (subject) claim, which is
+# used as the localpart of the mxid.
+#
+# Additionally, the expiration time ("exp"), not before time ("nbf"),
+# and issued at ("iat") claims are validated if present.
+#
+# Note that this is a non-standard login type and client support is
+# expected to be non-existent.
+#
+# See https://github.com/matrix-org/synapse/blob/master/docs/jwt.md.
+#
+#jwt_config:
+    # Uncomment the following to enable authorization using JSON web
+    # tokens. Defaults to false.
+    #
+    #enabled: true
+
+    # This is either the private shared secret or the public key used to
+    # decode the contents of the JSON web token.
+    #
+    # Required if 'enabled' is true.
+    #
+    #secret: "provided-by-your-issuer"
+
+    # The algorithm used to sign the JSON web token.
+    #
+    # Supported algorithms are listed at
+    # https://pyjwt.readthedocs.io/en/latest/algorithms.html
+    #
+    # Required if 'enabled' is true.
+    #
+    #algorithm: "provided-by-your-issuer"
+
+    # The issuer to validate the "iss" claim against.
+    #
+    # Optional, if provided the "iss" claim will be required and
+    # validated for all JSON web tokens.
+    #
+    #issuer: "provided-by-your-issuer"
+
+    # A list of audiences to validate the "aud" claim against.
+    #
+    # Optional, if provided the "aud" claim will be required and
+    # validated for all JSON web tokens.
+    #
+    # Note that if the "aud" claim is included in a JSON web token then
+    # validation will fail without configuring audiences.
+    #
+    #audiences:
+    #    - "provided-by-your-issuer"
+
+
+password_config:
+   # Uncomment to disable password login
+   #
+   #enabled: false
+
+   # Uncomment to disable authentication against the local password
+   # database. This is ignored if `enabled` is false, and is only useful
+   # if you have other password_providers.
+   #
+   #localdb_enabled: false
+
+   # Uncomment and change to a secret random string for extra security.
+   # DO NOT CHANGE THIS AFTER INITIAL SETUP!
+   #
+   #pepper: "EVEN_MORE_SECRET"
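+   #
+   # (One way, among others, to generate such a string is
+   # `openssl rand -base64 32` on the command line.)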
+
+   # Define and enforce a password policy. Each parameter is optional.
+   # This is an implementation of MSC2000.
+   #
+   policy:
+      # Whether to enforce the password policy.
+      # Defaults to 'false'.
+      #
+      #enabled: true
+
+      # Minimum accepted length for a password.
+      # Defaults to 0.
+      #
+      #minimum_length: 15
+
+      # Whether a password must contain at least one digit.
+      # Defaults to 'false'.
+      #
+      #require_digit: true
+
+      # Whether a password must contain at least one symbol.
+      # A symbol is any character that's not a number or a letter.
+      # Defaults to 'false'.
+      #
+      #require_symbol: true
+
+      # Whether a password must contain at least one lowercase letter.
+      # Defaults to 'false'.
+      #
+      #require_lowercase: true
+
+      # Whether a password must contain at least one uppercase letter.
+      # Defaults to 'false'.
+      #
+      #require_uppercase: true
+
+ui_auth:
+    # The amount of time to allow a user-interactive authentication session
+    # to be active.
+    #
+    # This defaults to 0, meaning the user is queried for their credentials
+    # before every action, but this can be overridden to allow a single
+    # validation to be re-used.  This weakens the protections afforded by
+    # the user-interactive authentication process, by allowing for multiple
+    # (and potentially different) operations to use the same validation session.
+    #
+    # Uncomment below to allow for credential validation to last for 15
+    # seconds.
+    #
+    #session_timeout: "15s"
+
+
+# Configuration for sending emails from Synapse.
+#
+email:
+  # The hostname of the outgoing SMTP server to use. Defaults to 'localhost'.
+  #
+  #smtp_host: mail.server
+
+  # The port on the mail server for outgoing SMTP. Defaults to 25.
+  #
+  #smtp_port: 587
+
+  # Username/password for authentication to the SMTP server. By default, no
+  # authentication is attempted.
+  #
+  #smtp_user: "exampleusername"
+  #smtp_pass: "examplepassword"
+
+  # Uncomment the following to require TLS transport security for SMTP.
+  # By default, Synapse will connect over plain text, and will then switch to
+  # TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
+  # Synapse will refuse to connect unless the server supports STARTTLS.
+  #
+  #require_transport_security: true
+
+  # notif_from defines the "From" address to use when sending emails.
+  # It must be set if email sending is enabled.
+  #
+  # The placeholder '%(app)s' will be replaced by the application name,
+  # which is normally 'app_name' (below), but may be overridden by the
+  # Matrix client application.
+  #
+  # Note that the placeholder must be written '%(app)s', including the
+  # trailing 's'.
+  #
+  #notif_from: "Your Friendly %(app)s homeserver "
+
+  # app_name defines the default value for '%(app)s' in notif_from and email
+  # subjects. It defaults to 'Matrix'.
+  #
+  #app_name: my_branded_matrix_server
+
+  # Uncomment the following to enable sending emails for messages that the user
+  # has missed. Disabled by default.
+  #
+  #enable_notifs: true
+
+  # Uncomment the following to disable automatic subscription to email
+  # notifications for new users. Enabled by default.
+  #
+  #notif_for_new_users: false
+
+  # Custom URL for client links within the email notifications. By default
+  # links will be based on "https://matrix.to".
+  #
+  # (This setting used to be called riot_base_url; the old name is still
+  # supported for backwards-compatibility but is now deprecated.)
+  #
+  #client_base_url: "http://localhost/riot"
+
+  # Configure the time that a validation email will expire after sending.
+  # Defaults to 1h.
+  #
+  #validation_token_lifetime: 15m
+
+  # The web client location to direct users to during an invite. This is passed
+  # to the identity server as the org.matrix.web_client_location key. Defaults
+  # to unset, giving no guidance to the identity server.
+  #
+  #invite_client_location: https://app.element.io
+
+  # Directory in which Synapse will try to find the template files below.
+  # If not set, or the files named below are not found within the template
+  # directory, default templates from within the Synapse package will be used.
+  #
+  # Synapse will look for the following templates in this directory:
+  #
+  # * The contents of email notifications of missed events: 'notif_mail.html' and
+  #   'notif_mail.txt'.
+  #
+  # * The contents of account expiry notice emails: 'notice_expiry.html' and
+  #   'notice_expiry.txt'.
+  #
+  # * The contents of password reset emails sent by the homeserver:
+  #   'password_reset.html' and 'password_reset.txt'
+  #
+  # * An HTML page that a user will see when they follow the link in the password
+  #   reset email. The user will be asked to confirm the action before their
+  #   password is reset: 'password_reset_confirmation.html'
+  #
+  # * HTML pages for success and failure that a user will see when they confirm
+  #   the password reset flow using the page above: 'password_reset_success.html'
+  #   and 'password_reset_failure.html'
+  #
+  # * The contents of address verification emails sent during registration:
+  #   'registration.html' and 'registration.txt'
+  #
+  # * HTML pages for success and failure that a user will see when they follow
+  #   the link in an address verification email sent during registration:
+  #   'registration_success.html' and 'registration_failure.html'
+  #
+  # * The contents of address verification emails sent when an address is added
+  #   to a Matrix account: 'add_threepid.html' and 'add_threepid.txt'
+  #
+  # * HTML pages for success and failure that a user will see when they follow
+  #   the link in an address verification email sent when an address is added
+  #   to a Matrix account: 'add_threepid_success.html' and
+  #   'add_threepid_failure.html'
+  #
+  # You can see the default templates at:
+  # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
+  #
+  #template_dir: "res/templates"
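+  # As an illustration only (hypothetical paths, not a generated default), a
+  # deployment overriding just the missed-event notification emails could ship:
+  #
+  #   /etc/synapse/templates/notif_mail.html
+  #   /etc/synapse/templates/notif_mail.txt
+  #
+  # together with:
+  #
+  #template_dir: "/etc/synapse/templates"
+  #
+  # As noted above, templates not found in this directory fall back to the
+  # defaults shipped with the Synapse package.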
+
+  # Subjects to use when sending emails from Synapse.
+  #
+  # The placeholder '%(app)s' will be replaced with the value of the 'app_name'
+  # setting above, or by a value dictated by the Matrix client application.
+  #
+  # If a subject isn't overridden in this configuration file, the example value
+  # shown for it below will be used (an illustrative override sketch follows
+  # the list of subjects).
+  #
+  #subjects:
+
+    # Subjects for notification emails.
+    #
+    # On top of the '%(app)s' placeholder, these can use the following
+    # placeholders:
+    #
+    #   * '%(person)s', which will be replaced by the display name of the user(s)
+    #      that sent the message(s), e.g. "Alice and Bob".
+    #   * '%(room)s', which will be replaced by the name of the room the
+    #      message(s) have been sent to, e.g. "My super room".
+    #
+    # See the example provided for each setting to see which placeholder can be
+    # used and how to use them.
+    #
+    # Subject to use to notify about one message from one or more user(s) in a
+    # room which has a name.
+    #message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..."
+    #
+    # Subject to use to notify about one message from one or more user(s) in a
+    # room which doesn't have a name.
+    #message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..."
+    #
+    # Subject to use to notify about multiple messages from one or more users in
+    # a room which doesn't have a name.
+    #messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..."
+    #
+    # Subject to use to notify about multiple messages in a room which has a
+    # name.
+    #messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..."
+    #
+    # Subject to use to notify about multiple messages in multiple rooms.
+    #messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..."
+    #
+    # Subject to use to notify about multiple messages from multiple persons in
+    # multiple rooms. This is similar to the setting above except it's used when
+    # the room in which the notification was triggered has no name.
+    #messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..."
+    #
+    # Subject to use to notify about an invite to a room which has a name.
+    #invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..."
+    #
+    # Subject to use to notify about an invite to a room which doesn't have a
+    # name.
+    #invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..."
+
+    # Subjects for emails related to account administration.
+    #
+    # On top of the '%(app)s' placeholder, these can use the
+    # '%(server_name)s' placeholder, which will be replaced by the value of the
+    # 'server_name' setting in your Synapse configuration.
+    #
+    # Subject to use when sending a password reset email.
+    #password_reset: "[%(server_name)s] Password reset"
+    #
+    # Subject to use when sending a verification email to assert an address's
+    # ownership.
+    #email_validation: "[%(server_name)s] Validate your email"
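+    # For example, to override only the password reset subject while keeping
+    # the default subjects shown above, uncomment 'subjects:' further up and
+    # set (an illustrative value, not a generated default):
+    #
+    #password_reset: "[%(server_name)s] Reset your password"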
+
+
+# Password providers allow homeserver administrators to integrate
+# their Synapse installation with existing authentication methods
+# e.g. LDAP, external tokens, etc.
+#
+# For more information and known implementations, please see
+# https://github.com/matrix-org/synapse/blob/master/docs/password_auth_providers.md
+#
+# Note: instances wishing to use SAML or CAS authentication should
+# instead use the `saml2_config` or `cas_config` options,
+# respectively.
+#
+password_providers:
+#    # Example config for an LDAP auth provider
+#    - module: "ldap_auth_provider.LdapAuthProvider"
+#      config:
+#        enabled: true
+#        uri: "ldap://ldap.example.com:389"
+#        start_tls: true
+#        base: "ou=users,dc=example,dc=com"
+#        attributes:
+#           uid: "cn"
+#           mail: "email"
+#           name: "givenName"
+#        #bind_dn:
+#        #bind_password:
+#        #filter: "(objectClass=posixAccount)"
+
+
+
+## Push ##
+
+push:
+  # Clients requesting push notifications can either have the body of
+  # the message sent in the notification poke along with other details
+  # like the sender, or just the event ID and room ID (`event_id_only`).
+  # If clients choose the former, this option controls whether the
+  # notification request includes the content of the event (other details
+  # like the sender are still included). For `event_id_only` push, it
+  # has no effect.
+  #
+  # On modern Android devices the notification content will still appear,
+  # because it is loaded by the app. iPhones, however, will only show a
+  # notification saying that a message arrived and who it came from.
+  #
+  # The default value is "true" to include message details. Uncomment to only
+  # include the event ID and room ID in push notification payloads.
+  #
+  #include_content: false
+
+  # When a push notification is received, an unread count is also sent.
+  # This number can either be calculated as the number of unread messages
+  # for the user, or the number of *rooms* the user has unread messages in.
+  #
+  # The default value is "true", meaning push clients will see the number of
+  # rooms with unread messages in them. Uncomment to instead send the number
+  # of unread messages.
+  #
+  #group_unread_count_by_room: false
+
+
+# Spam checkers are third-party modules that can block specific actions
+# of local users, such as creating rooms and registering undesirable
+# usernames, as well as remote users by redacting incoming events.
+#
+spam_checker:
+   #- module: "my_custom_project.SuperSpamChecker"
+   #  config:
+   #    example_option: 'things'
+   #- module: "some_other_project.BadEventStopper"
+   #  config:
+   #    example_stop_events_from: ['@bad:example.com']
+
+
+## Rooms ##
+
+# Controls whether locally-created rooms should be end-to-end encrypted by
+# default.
+#
+# Possible options are "all", "invite", and "off". They are defined as:
+#
+# * "all": any locally-created room
+# * "invite": any room created with the "private_chat" or "trusted_private_chat"
+#             room creation presets
+# * "off": this option will take no effect
+#
+# The default value is "off".
+#
+# Note that this option will only affect rooms created after it is set. It
+# will also not affect rooms created by other servers.
+#
+#encryption_enabled_by_default_for_room_type: invite
+
+
+# Uncomment to allow non-server-admin users to create groups on this server
+#
+#enable_group_creation: true
+
+# If enabled, users who are not server admins can only create groups with
+# local parts starting with this prefix.
+#
+#group_creation_prefix: "unofficial_"
+
+
+
+# User Directory configuration
+#
+user_directory:
+    # Defines whether users can search the user directory. If false then
+    # empty responses are returned to all queries. Defaults to true.
+    #
+    # Uncomment to disable the user directory.
+    #
+    #enabled: false
+
+    # Defines whether to search all users visible to your HS when searching
+    # the user directory, rather than limiting to users visible in public
+    # rooms. Defaults to false.
+    #
+    # If you set this to true, you'll have to rebuild the user_directory search
+    # indexes, see:
+    # https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
+    #
+    # Uncomment to return search results containing all known users, even if that
+    # user does not share a room with the requester.
+    #
+    #search_all_users: true
+
+    # Defines whether to prefer local users in search query results.
+    # If True, local users are more likely to appear above remote users
+    # when searching the user directory. Defaults to false.
+    #
+    # Uncomment to prefer local over remote users in user directory search
+    # results.
+    #
+    #prefer_local_users: true
+
+
+# User Consent configuration
+#
+# for detailed instructions, see
+# https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md
+#
+# Parts of this section are required if enabling the 'consent' resource under
+# 'listeners', in particular 'template_dir' and 'version'.
+#
+# 'template_dir' gives the location of the templates for the HTML forms.
+# This directory should contain one subdirectory per language (eg, 'en', 'fr'),
+# and each language directory should contain the policy document (named as
+# '.html') and a success page (success.html).
+#
+# 'version' specifies the 'current' version of the policy document. It defines
+# the version to be served by the consent resource if there is no 'v'
+# parameter.
+#
+# 'server_notice_content', if enabled, will send a user a "Server Notice"
+# asking them to consent to the privacy policy. The 'server_notices' section
+# must also be configured for this to work. Notices will *not* be sent to
+# guest users unless 'send_server_notice_to_guests' is set to true.
+#
+# 'block_events_error', if set, will block any attempts to send events
+# until the user consents to the privacy policy. The value of the setting is
+# used as the text of the error.
+#
+# 'require_at_registration', if enabled, will add a step to the registration
+# process, similar to how captcha works. Users will be required to accept the
+# policy before their account is created.
+#
+# 'policy_name' is the display name of the policy users will see when registering
+# for an account. Has no effect unless `require_at_registration` is enabled.
+# Defaults to "Privacy Policy".
+#
+#user_consent:
+#  template_dir: res/templates/privacy
+#  version: 1.0
+#  server_notice_content:
+#    msgtype: m.text
+#    body: >-
+#      To continue using this homeserver you must review and agree to the
+#      terms and conditions at %(consent_uri)s
+#  send_server_notice_to_guests: true
+#  block_events_error: >-
+#    To continue using this homeserver you must review and agree to the
+#    terms and conditions at %(consent_uri)s
+#  require_at_registration: false
+#  policy_name: Privacy Policy
+#
+
+
+
+# Settings for local room and user statistics collection. See
+# docs/room_and_user_statistics.md.
+#
+stats:
+  # Uncomment the following to disable room and user statistics. Note that doing
+  # so may cause certain features (such as the room directory) not to work
+  # correctly.
+  #
+  #enabled: false
+
+  # The size of each timeslice in the room_stats_historical and
+  # user_stats_historical tables, as a time period. Defaults to "1d".
+  #
+  #bucket_size: 1h
+
+
+# Server Notices room configuration
+#
+# Uncomment this section to enable a room which can be used to send notices
+# from the server to users. It is a special room which cannot be left; notices
+# come from a special "notices" user id.
+#
+# If you uncomment this section, you *must* define the system_mxid_localpart
+# setting, which defines the id of the user which will be used to send the
+# notices.
+#
+# It's also possible to override the room name, the display name of the
+# "notices" user, and the avatar for the user.
+#
+#server_notices:
+#  system_mxid_localpart: notices
+#  system_mxid_display_name: "Server Notices"
+#  system_mxid_avatar_url: "mxc://server.com/oumMVlgDnLYFaPVkExemNVVZ"
+#  room_name: "Server Notices"
+
+
+
+# Uncomment to disable searching the public room list. When disabled,
+# searches of local and remote room lists by local and remote users
+# always return an empty list.
+#
+#enable_room_list_search: false
+
+# The `alias_creation` option controls who's allowed to create aliases
+# on this server.
+#
+# The format of this option is a list of rules that contain globs that
+# match against user_id, room_id and the new alias (fully qualified with
+# server name). The action in the first rule that matches is taken,
+# which can currently either be "allow" or "deny".
+#
+# Missing user_id/room_id/alias fields default to "*".
+#
+# If no rules match, the request is denied. An empty list means no one
+# can create aliases.
+#
+# Options for the rules include:
+#
+#   user_id: Matches against the creator of the alias
+#   alias: Matches against the alias being created
+#   room_id: Matches against the room ID the alias is being pointed at
+#   action: Whether to "allow" or "deny" the request if the rule matches
+#
+# The default is:
+#
+#alias_creation_rules:
+#  - user_id: "*"
+#    alias: "*"
+#    room_id: "*"
+#    action: allow
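+# For example (a hypothetical policy, not a generated default), the rules below
+# reserve aliases starting with '#official_' for a single trusted account while
+# leaving all other aliases open, relying on the first-match-wins behaviour and
+# the "*" defaults described above:
+#
+#alias_creation_rules:
+#  - user_id: "@roombot:example.com"
+#    alias: "#official_*"
+#    action: allow
+#  - alias: "#official_*"
+#    action: deny
+#  - action: allow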
+
+# The `room_list_publication_rules` option controls who can publish and
+# which rooms can be published in the public room list.
+#
+# The format of this option is the same as that for
+# `alias_creation_rules`.
+#
+# If the room has one or more aliases associated with it, only one of
+# the aliases needs to match the alias rule. If there are no aliases
+# then only rules with `alias: *` match.
+#
+# If no rules match, the request is denied. An empty list means no one
+# can publish rooms.
+#
+# Options for the rules include:
+#
+#   user_id: Matches against the creator of the alias
+#   room_id: Matches against the room ID being published
+#   alias: Matches against any current local or canonical aliases
+#            associated with the room
+#   action: Whether to "allow" or "deny" the request if the rule matches
+#
+# The default is:
+#
+#room_list_publication_rules:
+#  - user_id: "*"
+#    alias: "*"
+#    room_id: "*"
+#    action: allow
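+# For example (a hypothetical policy, not a generated default), the rules below
+# only allow publishing rooms that carry an alias starting with '#public_';
+# rooms with no matching alias, or no alias at all, fall through to the final
+# deny rule:
+#
+#room_list_publication_rules:
+#  - alias: "#public_*"
+#    action: allow
+#  - action: deny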
+
+
+# Server admins can define a Python module that implements extra rules for
+# allowing or denying incoming events. In order to work, this module needs to
+# override the methods defined in synapse/events/third_party_rules.py.
+#
+# This feature is designed to be used in closed federations only, where each
+# participating server enforces the same rules.
+#
+#third_party_event_rules:
+#  module: "my_custom_project.SuperRulesSet"
+#  config:
+#    example_option: 'things'
+
+
+## Opentracing ##
+
+# These settings enable opentracing, which implements distributed tracing.
+# This allows you to observe the causal chains of events across servers
+# including requests, key lookups etc., across any server running
+# synapse or any other services which support opentracing
+# (specifically those implemented with Jaeger).
+#
+opentracing:
+    # tracing is disabled by default. Uncomment the following line to enable it.
+    #
+    #enabled: true
+
+    # The list of homeservers with which we wish to exchange span contexts and span baggage.
+    # See docs/opentracing.rst.
+    #
+    # This is a list of regexes which are matched against the server_name of the
+    # homeserver.
+    #
+    # By default, it is empty, so no servers are matched.
+    #
+    #homeserver_whitelist:
+    #  - ".*"
+
+    # A list of the matrix IDs of users whose requests will always be traced,
+    # even if the tracing system would otherwise drop the traces due to
+    # probabilistic sampling.
+    #
+    # By default, the list is empty.
+    #
+    #force_tracing_for_users:
+    #  - "@user1:server_name"
+    #  - "@user2:server_name"
+
+    # Jaeger can be configured to sample traces at different rates.
+    # All configuration options provided by Jaeger can be set here.
+    # Jaeger's configuration is mostly related to trace sampling which
+    # is documented here:
+    # https://www.jaegertracing.io/docs/latest/sampling/.
+    #
+    #jaeger_config:
+    #  sampler:
+    #    type: const
+    #    param: 1
+    #  logging:
+    #    false
+
+
+## Workers ##
+
+# Disables sending of outbound federation transactions on the main process.
+# Uncomment if using a federation sender worker.
+#
+#send_federation: false
+
+# It is possible to run multiple federation sender workers, in which case the
+# work is balanced across them.
+#
+# This configuration must be shared between all federation sender workers, and if
+# changed all federation sender workers must be stopped at the same time and then
+# started, to ensure that all instances are running with the same config (otherwise
+# events may be dropped).
+#
+#federation_sender_instances:
+#  - federation_sender1
+
+# When using workers this should be a map from `worker_name` to the
+# HTTP replication listener of the worker, if configured.
+#
+#instance_map:
+#  worker1:
+#    host: localhost
+#    port: 8034
+
+# Experimental: When using workers you can define which workers should
+# handle event persistence and typing notifications. Any worker
+# specified here must also be in the `instance_map`.
+#
+#stream_writers:
+#  events: worker1
+#  typing: worker1
+
+# The worker that is used to run background tasks (e.g. cleaning up expired
+# data). If not provided this defaults to the main process.
+#
+#run_background_tasks_on: worker1
+
+# A shared secret used by the replication APIs to authenticate HTTP requests
+# from workers.
+#
+# By default this is unused and traffic is not authenticated.
+#
+#worker_replication_secret: ""
+
+
+# Configuration for Redis when using workers. This *must* be enabled when
+# using workers (unless using old style direct TCP configuration).
+#
+redis:
+  # Uncomment the below to enable Redis support.
+  #
+  #enabled: true
+
+  # Optional host and port to use to connect to Redis. Defaults to
+  # localhost and 6379.
+  #
+  #host: localhost
+  #port: 6379
+
+  # Optional password if configured on the Redis instance
+  #
+  #password: <secret_password>
+
+
+# Enable experimental features in Synapse.
+#
+# Experimental features might break or be removed without a deprecation
+# period.
+#
+experimental_features:
+  # Support for Spaces (MSC1772). Enabling it provides the following:
+  #
+  # * The Spaces Summary API (MSC2946).
+  # * Restricting room membership based on space membership (MSC3083).
+  #
+  # Uncomment to disable support for Spaces.
+  #spaces_enabled: false
+
+
+# vim:ft=yaml
diff --git a/tests/tests.py b/tests/tests.py
new file mode 100644
index 0000000..eed92df
--- /dev/null
+++ b/tests/tests.py
@@ -0,0 +1,51 @@
+"""Main test module."""
+
+import json
+import os
+
+import aiohttp
+import nio
+
+from utils import MATRIX_ID, MATRIX_PW, MATRIX_URL, AbstractBotTest
+
+KEY = os.environ['API_KEY']
+
+
+class BotTest(AbstractBotTest):
+    """Main test class."""
+    async def test_errors(self):
+        """Check the bot's error paths."""
+        async with aiohttp.ClientSession() as session:
+            async with session.get('http://bot:4785') as response:
+                self.assertEqual(await response.json(), {'status': 400, 'ret': 'Invalid JSON'})
+            async with session.post('http://bot:4785', data=json.dumps({'toto': 3})) as response:
+                self.assertEqual(await response.json(), {'status': 400, 'ret': 'Missing text and/or API key property'})
+            async with session.post('http://bot:4785', data=json.dumps({'text': 3, 'key': None})) as response:
+                self.assertEqual(await response.json(), {'status': 401, 'ret': 'Invalid API key'})
+            async with session.post('http://bot:4785', data=json.dumps({'text': 3, 'key': KEY})) as response:
+                # TODO: we are not sending to a real room, so this should not be "OK"
+                self.assertEqual(await response.json(), {'status': 200, 'ret': 'OK'})
+
+    async def test_message(self):
+        """Send a markdown message, and check the result."""
+        text = '# Hello'
+        messages = []
+        client = nio.AsyncClient(MATRIX_URL, MATRIX_ID)
+
+        await client.login(MATRIX_PW)
+
+        room = await client.room_create()
+
+        url = f'http://bot:4785/{room.room_id}'
+        async with aiohttp.ClientSession() as session:
+            async with session.post(url, data=json.dumps({'text': text, 'key': KEY})) as response:
+                self.assertEqual(await response.json(), {'status': 200, 'ret': 'OK'})
+
+        sync = await client.sync()
+        messages = await client.room_messages(room.room_id, sync.next_batch)
+
+        message = messages.chunk[0]
+        self.assertEqual(message.sender, '@bot:tests')
+        self.assertEqual(message.body, text)
+        self.assertEqual(message.formatted_body, '<h1>Hello</h1>
') + await client.close() diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..c794c90 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,49 @@ +"""Utility tools to run tests.""" +import asyncio +import os +import time +import unittest + +import aiohttp +import yaml +from synapse._scripts.register_new_matrix_user import request_registration + +MATRIX_URL, MATRIX_ID, MATRIX_PW = (os.environ[v] for v in ['MATRIX_URL', 'MATRIX_ID', 'MATRIX_PW']) + + +class AbstractBotTest(unittest.IsolatedAsyncioTestCase): + """Abstract test class.""" + async def asyncSetUp(self): + """Set up the test environment.""" + # Wait for synapse and the bot to answer + self.assertTrue( + all(await asyncio.gather( + wait_available(f'{MATRIX_URL}/_matrix/client/r0/login', 'flows'), + wait_available('http://bot:4785/', 'status'), + ))) + + # Try to register an user for the bot. Don't worry if it already exists. + with open('homeserver.yaml') as f: + secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) + request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=False, user_type=None, exit=lambda x: x) + + +async def check_json(session: aiohttp.ClientSession, url: str, key: str) -> bool: + """Ensure a service at a given url answers with valid json containing a certain key.""" + try: + async with session.get(url) as response: + data = await response.json() + return key in data + except aiohttp.client_exceptions.ClientConnectorError: + return False + + +async def wait_available(url: str, key: str, timeout: int = 60) -> bool: + """Wait until a service answer correctly or timeout.""" + start = time.time() + async with aiohttp.ClientSession() as session: + while True: + if await check_json(session, url, key): + return True + if time.time() > start + timeout: + return False From 999b82487432049511b3d964989c279b69ed7ab4 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 11 Jul 2021 16:17:09 +0200 Subject: [PATCH 018/184] setup coverage --- .github/workflows/test.yml | 4 +++- .gitignore | 1 + Dockerfile | 2 +- test.yml | 10 +++++----- tests/Dockerfile | 16 ++++++++-------- tests/start.py | 19 +++++++++++++++++++ tests/tests.py | 12 ++++++------ tests/utils.py | 5 +++-- 8 files changed, 46 insertions(+), 23 deletions(-) create mode 100644 .gitignore create mode 100755 tests/start.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2819793..33692e1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,4 +9,6 @@ jobs: - name: Start run: docker-compose -f test.yml up -d - name: Tests - run: docker-compose -f test.yml run --entrypoint "" tests python -m unittest + run: docker-compose -f test.yml run --entrypoint "" tests ./tests/start.py + - name: "Upload coverage to Codecov" + uses: codecov/codecov-action@v1 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..33103d9 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +coverage.xml diff --git a/Dockerfile b/Dockerfile index 131bfd6..a6c9e70 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM python:3.9-slim EXPOSE 4785 -RUN pip3 install --no-cache-dir markdown matrix-nio +RUN pip install --no-cache-dir markdown matrix-nio ADD matrix_webhook.py / diff --git a/test.yml b/test.yml index 891b72b..33eee30 100644 --- a/test.yml +++ b/test.yml @@ -1,11 +1,11 @@ version: '3' services: - bot: - build: . - env_file: - - tests/.env tests: - build: tests + build: + context: . 
+ dockerfile: tests/Dockerfile env_file: - tests/.env + volumes: + - ./:/app diff --git a/tests/Dockerfile b/tests/Dockerfile index 2cd55d3..aa27576 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -1,17 +1,17 @@ +# Leverage a synapse base to be able to: +# "from synapse._scripts.register_new_matrix_user import request_registration" FROM matrixdotorg/synapse -# This defaults to /data which is a volume aiming at keeping data. -# Here, we want to trash those, and avoid the permission issues, so let's use something else +# This variable defaults to /data which is a volume made to keep data. +# Here, we want to trash those (and avoid the permission issues) by using something else ENV SYNAPSE_CONFIG_DIR=/srv +# Generate keys for synapse WORKDIR $SYNAPSE_CONFIG_DIR - -ADD homeserver.yaml . - +ADD tests/homeserver.yaml . RUN python -m synapse.app.homeserver --config-path homeserver.yaml --generate-keys - RUN chown -R 991:991 . -RUN python -m pip install aiohttp matrix-nio +RUN pip install --no-cache-dir aiohttp matrix-nio markdown coverage -ADD . . +WORKDIR /app diff --git a/tests/start.py b/tests/start.py new file mode 100755 index 0000000..b27512b --- /dev/null +++ b/tests/start.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +"""Entry point to start an instrumentalized bot for coverage and run tests.""" + +from subprocess import Popen, run +from unittest import main + + +def run_and_test(): + """Launch the bot and its tests.""" + bot = Popen(['coverage', 'run', 'matrix_webhook.py']) + ret = main(module=None, exit=False).result.wasSuccessful() + bot.terminate() + for cmd in ['report', 'html', 'xml']: + run(['coverage', cmd]) + return ret + + +if __name__ == '__main__': + exit(not run_and_test()) diff --git a/tests/tests.py b/tests/tests.py index eed92df..cdb32a8 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -6,7 +6,7 @@ import os import aiohttp import nio -from utils import MATRIX_ID, MATRIX_PW, MATRIX_URL, AbstractBotTest +from .utils import BOT_URL, MATRIX_ID, MATRIX_PW, MATRIX_URL, AbstractBotTest KEY = os.environ['API_KEY'] @@ -16,13 +16,13 @@ class BotTest(AbstractBotTest): async def test_errors(self): """Check the bot's error paths.""" async with aiohttp.ClientSession() as session: - async with session.get('http://bot:4785') as response: + async with session.get(BOT_URL) as response: self.assertEqual(await response.json(), {'status': 400, 'ret': 'Invalid JSON'}) - async with session.post('http://bot:4785', data=json.dumps({'toto': 3})) as response: + async with session.post(BOT_URL, data=json.dumps({'toto': 3})) as response: self.assertEqual(await response.json(), {'status': 400, 'ret': 'Missing text and/or API key property'}) - async with session.post('http://bot:4785', data=json.dumps({'text': 3, 'key': None})) as response: + async with session.post(BOT_URL, data=json.dumps({'text': 3, 'key': None})) as response: self.assertEqual(await response.json(), {'status': 401, 'ret': 'Invalid API key'}) - async with session.post('http://bot:4785', data=json.dumps({'text': 3, 'key': KEY})) as response: + async with session.post(BOT_URL, data=json.dumps({'text': 3, 'key': KEY})) as response: # TODO: we are not sending to a real room, so this should not be "OK" self.assertEqual(await response.json(), {'status': 200, 'ret': 'OK'}) @@ -36,7 +36,7 @@ class BotTest(AbstractBotTest): room = await client.room_create() - url = f'http://bot:4785/{room.room_id}' + url = f'{BOT_URL}/{room.room_id}' async with aiohttp.ClientSession() as session: async with session.post(url, data=json.dumps({'text': 
text, 'key': KEY})) as response: self.assertEqual(await response.json(), {'status': 200, 'ret': 'OK'}) diff --git a/tests/utils.py b/tests/utils.py index c794c90..3b4bd09 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,6 +8,7 @@ import aiohttp import yaml from synapse._scripts.register_new_matrix_user import request_registration +BOT_URL = 'http://localhost:4785' MATRIX_URL, MATRIX_ID, MATRIX_PW = (os.environ[v] for v in ['MATRIX_URL', 'MATRIX_ID', 'MATRIX_PW']) @@ -19,11 +20,11 @@ class AbstractBotTest(unittest.IsolatedAsyncioTestCase): self.assertTrue( all(await asyncio.gather( wait_available(f'{MATRIX_URL}/_matrix/client/r0/login', 'flows'), - wait_available('http://bot:4785/', 'status'), + wait_available(BOT_URL, 'status'), ))) # Try to register an user for the bot. Don't worry if it already exists. - with open('homeserver.yaml') as f: + with open('/srv/homeserver.yaml') as f: secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=False, user_type=None, exit=lambda x: x) From 6a2e0336d922bc86f1717e0813b096581c80b502 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 01:16:24 +0200 Subject: [PATCH 019/184] clean tests --- tests/Dockerfile | 2 +- tests/start.py | 40 ++++++++++++++++++++++++++++++++ tests/tests.py | 60 +++++++++++++++++++++++++++++++----------------- tests/utils.py | 50 ---------------------------------------- 4 files changed, 80 insertions(+), 72 deletions(-) delete mode 100644 tests/utils.py diff --git a/tests/Dockerfile b/tests/Dockerfile index aa27576..af2e1bd 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -12,6 +12,6 @@ ADD tests/homeserver.yaml . RUN python -m synapse.app.homeserver --config-path homeserver.yaml --generate-keys RUN chown -R 991:991 . -RUN pip install --no-cache-dir aiohttp matrix-nio markdown coverage +RUN pip install --no-cache-dir markdown matrix-nio httpx coverage WORKDIR /app diff --git a/tests/start.py b/tests/start.py index b27512b..8eef22e 100755 --- a/tests/start.py +++ b/tests/start.py @@ -1,13 +1,53 @@ #!/usr/bin/env python """Entry point to start an instrumentalized bot for coverage and run tests.""" +from os import environ from subprocess import Popen, run +from time import time from unittest import main +import httpx +import yaml +from synapse._scripts.register_new_matrix_user import request_registration + +BOT_URL = 'http://localhost:4785' +MATRIX_URL, MATRIX_ID, MATRIX_PW = (environ[v] for v in ['MATRIX_URL', 'MATRIX_ID', 'MATRIX_PW']) + + +def check_json(url: str, key: str) -> bool: + """Ensure a service at a given url answers with valid json containing a certain key.""" + try: + data = httpx.get(url).json() + return key in data + except httpx.ConnectError: + return False + + +def wait_available(url: str, key: str, timeout: int = 10) -> bool: + """Wait until a service answer correctly or timeout.""" + start = time() + while True: + if check_json(url, key): + return True + if time() > start + timeout: + return False + def run_and_test(): """Launch the bot and its tests.""" + if not wait_available(f'{MATRIX_URL}/_matrix/client/r0/login', 'flows'): + return False + + # Try to register an user for the bot. 
+ with open('/srv/homeserver.yaml') as f: + secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) + request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=True) + bot = Popen(['coverage', 'run', 'matrix_webhook.py']) + + if not wait_available(BOT_URL, 'status'): + return False + ret = main(module=None, exit=False).result.wasSuccessful() bot.terminate() for cmd in ['report', 'html', 'xml']: diff --git a/tests/tests.py b/tests/tests.py index cdb32a8..9a0e807 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -1,30 +1,35 @@ """Main test module.""" -import json import os +import unittest -import aiohttp +import httpx import nio -from .utils import BOT_URL, MATRIX_ID, MATRIX_PW, MATRIX_URL, AbstractBotTest +from .start import BOT_URL, MATRIX_ID, MATRIX_PW, MATRIX_URL KEY = os.environ['API_KEY'] +FULL_ID = f'@{MATRIX_ID}:{MATRIX_URL.split("/")[2]}' -class BotTest(AbstractBotTest): +def bot_req(req=None, key=None, room_id=None): + """Bot requests boilerplate.""" + if key is not None: + req['key'] = key + url = BOT_URL if room_id is None else f'{BOT_URL}/{room_id}' + return httpx.post(url, json=req).json() + + +class BotTest(unittest.IsolatedAsyncioTestCase): """Main test class.""" - async def test_errors(self): + def test_errors(self): """Check the bot's error paths.""" - async with aiohttp.ClientSession() as session: - async with session.get(BOT_URL) as response: - self.assertEqual(await response.json(), {'status': 400, 'ret': 'Invalid JSON'}) - async with session.post(BOT_URL, data=json.dumps({'toto': 3})) as response: - self.assertEqual(await response.json(), {'status': 400, 'ret': 'Missing text and/or API key property'}) - async with session.post(BOT_URL, data=json.dumps({'text': 3, 'key': None})) as response: - self.assertEqual(await response.json(), {'status': 401, 'ret': 'Invalid API key'}) - async with session.post(BOT_URL, data=json.dumps({'text': 3, 'key': KEY})) as response: - # TODO: we are not sending to a real room, so this should not be "OK" - self.assertEqual(await response.json(), {'status': 200, 'ret': 'OK'}) + self.assertEqual(bot_req(), {'status': 400, 'ret': 'Invalid JSON'}) + self.assertEqual(bot_req({'toto': 3}), {'status': 400, 'ret': 'Missing text and/or API key property'}) + self.assertEqual(bot_req({'text': 3, 'key': None}), {'status': 401, 'ret': 'Invalid API key'}) + + # TODO: we are not sending to a real room, so this should not be "OK" + self.assertEqual(bot_req({'text': 3}, KEY), {'status': 200, 'ret': 'OK'}) async def test_message(self): """Send a markdown message, and check the result.""" @@ -33,19 +38,32 @@ class BotTest(AbstractBotTest): client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) await client.login(MATRIX_PW) - room = await client.room_create() - url = f'{BOT_URL}/{room.room_id}' - async with aiohttp.ClientSession() as session: - async with session.post(url, data=json.dumps({'text': text, 'key': KEY})) as response: - self.assertEqual(await response.json(), {'status': 200, 'ret': 'OK'}) + self.assertEqual(bot_req({'text': text}, KEY, room.room_id), {'status': 200, 'ret': 'OK'}) sync = await client.sync() messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() message = messages.chunk[0] - self.assertEqual(message.sender, '@bot:tests') + self.assertEqual(message.sender, FULL_ID) self.assertEqual(message.body, text) self.assertEqual(message.formatted_body, '
<h1>Hello</h1>
except httpx.ConnectError: - return False - - def wait_available(url: str, key: str, timeout: int = 10) -> bool: """Wait until a service answer correctly or timeout.""" + def check_json(url: str, key: str) -> bool: + """Ensure a service at a given url answers with valid json containing a certain key.""" + try: + data = httpx.get(url).json() + return key in data + except httpx.ConnectError: + return False + start = time() while True: if check_json(url, key): @@ -38,7 +37,7 @@ def run_and_test(): if not wait_available(f'{MATRIX_URL}/_matrix/client/r0/login', 'flows'): return False - # Try to register an user for the bot. + # Try to register a user for the bot. with open('/srv/homeserver.yaml') as f: secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=True) From 6633020fba07ba367285747888af99d31032ceaa Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 10:28:40 +0200 Subject: [PATCH 021/184] clean tests --- .github/workflows/test.yml | 8 +++----- test.yml | 1 + tests/Dockerfile | 2 ++ tests/start.py | 25 ++++++++++++++++++++++--- tests/tests.py | 32 +------------------------------- 5 files changed, 29 insertions(+), 39 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 33692e1..31504e0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,11 +4,9 @@ jobs: tests: runs-on: ubuntu-latest steps: - - name: Check out repository code + - name: Checkout uses: actions/checkout@v2 - - name: Start - run: docker-compose -f test.yml up -d - name: Tests - run: docker-compose -f test.yml run --entrypoint "" tests ./tests/start.py - - name: "Upload coverage to Codecov" + run: docker-compose -f test.yml up --exit-code-from tests + - name: Coverage uses: codecov/codecov-action@v1 diff --git a/test.yml b/test.yml index 33eee30..7ffda14 100644 --- a/test.yml +++ b/test.yml @@ -5,6 +5,7 @@ services: build: context: . dockerfile: tests/Dockerfile + entrypoint: "" env_file: - tests/.env volumes: diff --git a/tests/Dockerfile b/tests/Dockerfile index af2e1bd..09778e3 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -15,3 +15,5 @@ RUN chown -R 991:991 . 
RUN pip install --no-cache-dir markdown matrix-nio httpx coverage WORKDIR /app + +CMD ./tests/start.py diff --git a/tests/start.py b/tests/start.py index 79ac0a8..97cc0f0 100755 --- a/tests/start.py +++ b/tests/start.py @@ -11,7 +11,16 @@ import yaml from synapse._scripts.register_new_matrix_user import request_registration BOT_URL = 'http://localhost:4785' -MATRIX_URL, MATRIX_ID, MATRIX_PW = (environ[v] for v in ['MATRIX_URL', 'MATRIX_ID', 'MATRIX_PW']) +KEY, MATRIX_URL, MATRIX_ID, MATRIX_PW = (environ[v] for v in ['API_KEY', 'MATRIX_URL', 'MATRIX_ID', 'MATRIX_PW']) +FULL_ID = f'@{MATRIX_ID}:{MATRIX_URL.split("/")[2]}' + + +def bot_req(req=None, key=None, room_id=None): + """Bot requests boilerplate.""" + if key is not None: + req['key'] = key + url = BOT_URL if room_id is None else f'{BOT_URL}/{room_id}' + return httpx.post(url, json=req).json() def wait_available(url: str, key: str, timeout: int = 10) -> bool: @@ -34,21 +43,31 @@ def wait_available(url: str, key: str, timeout: int = 10) -> bool: def run_and_test(): """Launch the bot and its tests.""" + # Start the server, and wait for it + srv = Popen(['python', '-m', 'synapse.app.homeserver', '--config-path', '/srv/homeserver.yaml']) if not wait_available(f'{MATRIX_URL}/_matrix/client/r0/login', 'flows'): return False - # Try to register a user for the bot. + # Register a user for the bot. with open('/srv/homeserver.yaml') as f: secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=True) + # Start the bot, and wait for it bot = Popen(['coverage', 'run', 'matrix_webhook.py']) - if not wait_available(BOT_URL, 'status'): return False + # Run the main unittest module ret = main(module=None, exit=False).result.wasSuccessful() + + srv.terminate() + + # TODO Check what the bot says when the server is offline + # print(bot_req({'text': 'bye'}, KEY), {'status': 200, 'ret': 'OK'}) + bot.terminate() + for cmd in ['report', 'html', 'xml']: run(['coverage', cmd]) return ret diff --git a/tests/tests.py b/tests/tests.py index 9a0e807..3bf710e 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -1,23 +1,10 @@ """Main test module.""" -import os import unittest -import httpx import nio -from .start import BOT_URL, MATRIX_ID, MATRIX_PW, MATRIX_URL - -KEY = os.environ['API_KEY'] -FULL_ID = f'@{MATRIX_ID}:{MATRIX_URL.split("/")[2]}' - - -def bot_req(req=None, key=None, room_id=None): - """Bot requests boilerplate.""" - if key is not None: - req['key'] = key - url = BOT_URL if room_id is None else f'{BOT_URL}/{room_id}' - return httpx.post(url, json=req).json() +from .start import FULL_ID, KEY, MATRIX_ID, MATRIX_PW, MATRIX_URL, bot_req class BotTest(unittest.IsolatedAsyncioTestCase): @@ -50,20 +37,3 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(message.sender, FULL_ID) self.assertEqual(message.body, text) self.assertEqual(message.formatted_body, '
<h1>Hello</h1>
') - - async def test_z_disconnected(self): - """Send a message after disconnection, and check the error.""" - client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) - await client.login(MATRIX_PW) - token = client.access_token - - resp = httpx.post(f'{MATRIX_URL}/_synapse/admin/v1/deactivate/{FULL_ID}', - json={'erase': True}, - params={'access_token': token}) - self.assertEqual(resp.json(), {'id_server_unbind_result': 'success'}) - - await client.logout(all_devices=True) - await client.close() - - # TODO: I was hopping that one wouldn't be happy - self.assertEqual(bot_req({'text': 'bye'}, KEY), {'status': 200, 'ret': 'OK'}) From 562b29c8a2763846c2b58a7c3885fb2f2652a3fb Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 10:51:41 +0200 Subject: [PATCH 022/184] tests: generate homeserver on build --- tests/Dockerfile | 13 +- tests/homeserver.yaml | 2910 ----------------------------------------- 2 files changed, 7 insertions(+), 2916 deletions(-) delete mode 100644 tests/homeserver.yaml diff --git a/tests/Dockerfile b/tests/Dockerfile index 09778e3..0ff4cb2 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -2,15 +2,16 @@ # "from synapse._scripts.register_new_matrix_user import request_registration" FROM matrixdotorg/synapse -# This variable defaults to /data which is a volume made to keep data. +# The config dir defaults to /data which is a volume made to keep data. # Here, we want to trash those (and avoid the permission issues) by using something else -ENV SYNAPSE_CONFIG_DIR=/srv +ENV SYNAPSE_CONFIG_DIR=/srv SYNAPSE_SERVER_NAME=tests SYNAPSE_REPORT_STATS=no -# Generate keys for synapse +# Generate configuration and keys for synapse WORKDIR $SYNAPSE_CONFIG_DIR -ADD tests/homeserver.yaml . -RUN python -m synapse.app.homeserver --config-path homeserver.yaml --generate-keys -RUN chown -R 991:991 . +RUN chown -R 991:991 . \ + && /start.py generate \ + && sed -i 's=/data=/srv=;s=8008=80=;s=#sup=sup=;' homeserver.yaml \ + && python -m synapse.app.homeserver --config-path homeserver.yaml --generate-keys RUN pip install --no-cache-dir markdown matrix-nio httpx coverage diff --git a/tests/homeserver.yaml b/tests/homeserver.yaml deleted file mode 100644 index 4d3e994..0000000 --- a/tests/homeserver.yaml +++ /dev/null @@ -1,2910 +0,0 @@ -# Configuration file for Synapse. -# -# This is a YAML file: see [1] for a quick introduction. Note in particular -# that *indentation is important*: all the elements of a list or dictionary -# should have the same indentation. -# -# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html - -## Server ## - -# The public-facing domain of the server -# -# The server_name name will appear at the end of usernames and room addresses -# created on this server. For example if the server_name was example.com, -# usernames on this server would be in the format @user:example.com -# -# In most cases you should avoid using a matrix specific subdomain such as -# matrix.example.com or synapse.example.com as the server_name for the same -# reasons you wouldn't use user@email.example.com as your email address. -# See https://github.com/matrix-org/synapse/blob/master/docs/delegate.md -# for information on how to host Synapse on a subdomain while preserving -# a clean server_name. -# -# The server_name cannot be changed later so it is important to -# configure this correctly before you start Synapse. It should be all -# lowercase and may contain an explicit port. 
-# Examples: matrix.org, localhost:8080 -# -server_name: "tests" - -# When running as a daemon, the file to store the pid in -# -pid_file: /srv/homeserver.pid - -# The absolute URL to the web client which /_matrix/client will redirect -# to if 'webclient' is configured under the 'listeners' configuration. -# -# This option can be also set to the filesystem path to the web client -# which will be served at /_matrix/client/ if 'webclient' is configured -# under the 'listeners' configuration, however this is a security risk: -# https://github.com/matrix-org/synapse#security-note -# -#web_client_location: https://riot.example.com/ - -# The public-facing base URL that clients use to access this Homeserver (not -# including _matrix/...). This is the same URL a user might enter into the -# 'Custom Homeserver URL' field on their client. If you use Synapse with a -# reverse proxy, this should be the URL to reach Synapse via the proxy. -# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see -# 'listeners' below). -# -#public_baseurl: https://example.com/ - -# Set the soft limit on the number of file descriptors synapse can use -# Zero is used to indicate synapse should set the soft limit to the -# hard limit. -# -#soft_file_limit: 0 - -# Presence tracking allows users to see the state (e.g online/offline) -# of other local and remote users. -# -presence: - # Uncomment to disable presence tracking on this homeserver. This option - # replaces the previous top-level 'use_presence' option. - # - #enabled: false - - # Presence routers are third-party modules that can specify additional logic - # to where presence updates from users are routed. - # - presence_router: - # The custom module's class. Uncomment to use a custom presence router module. - # - #module: "my_custom_router.PresenceRouter" - - # Configuration options of the custom module. Refer to your module's - # documentation for available options. - # - #config: - # example_option: 'something' - -# Whether to require authentication to retrieve profile data (avatars, -# display names) of other users through the client API. Defaults to -# 'false'. Note that profile data is also available via the federation -# API, unless allow_profile_lookup_over_federation is set to false. -# -#require_auth_for_profile_requests: true - -# Uncomment to require a user to share a room with another user in order -# to retrieve their profile information. Only checked on Client-Server -# requests. Profile requests from other servers should be checked by the -# requesting server. Defaults to 'false'. -# -#limit_profile_requests_to_users_who_share_rooms: true - -# Uncomment to prevent a user's profile data from being retrieved and -# displayed in a room until they have joined it. By default, a user's -# profile data is included in an invite event, regardless of the values -# of the above two settings, and whether or not the users share a server. -# Defaults to 'true'. -# -#include_profile_data_on_invite: false - -# If set to 'true', removes the need for authentication to access the server's -# public rooms directory through the client API, meaning that anyone can -# query the room directory. Defaults to 'false'. -# -#allow_public_rooms_without_auth: true - -# If set to 'true', allows any other homeserver to fetch the server's public -# rooms directory via federation. Defaults to 'false'. -# -#allow_public_rooms_over_federation: true - -# The default room version for newly created rooms. 
-# -# Known room versions are listed here: -# https://matrix.org/docs/spec/#complete-list-of-room-versions -# -# For example, for room version 1, default_room_version should be set -# to "1". -# -#default_room_version: "6" - -# The GC threshold parameters to pass to `gc.set_threshold`, if defined -# -#gc_thresholds: [700, 10, 10] - -# The minimum time in seconds between each GC for a generation, regardless of -# the GC thresholds. This ensures that we don't do GC too frequently. -# -# A value of `[1s, 10s, 30s]` indicates that a second must pass between consecutive -# generation 0 GCs, etc. -# -# Defaults to `[1s, 10s, 30s]`. -# -#gc_min_interval: [0.5s, 30s, 1m] - -# Set the limit on the returned events in the timeline in the get -# and sync operations. The default value is 100. -1 means no upper limit. -# -# Uncomment the following to increase the limit to 5000. -# -#filter_timeline_limit: 5000 - -# Whether room invites to users on this server should be blocked -# (except those sent by local server admins). The default is False. -# -#block_non_admin_invites: true - -# Room searching -# -# If disabled, new messages will not be indexed for searching and users -# will receive errors when searching for messages. Defaults to enabled. -# -#enable_search: false - -# Prevent outgoing requests from being sent to the following blacklisted IP address -# CIDR ranges. If this option is not specified then it defaults to private IP -# address ranges (see the example below). -# -# The blacklist applies to the outbound requests for federation, identity servers, -# push servers, and for checking key validity for third-party invite events. -# -# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly -# listed here, since they correspond to unroutable addresses.) -# -# This option replaces federation_ip_range_blacklist in Synapse v1.25.0. -# -#ip_range_blacklist: -# - '127.0.0.0/8' -# - '10.0.0.0/8' -# - '172.16.0.0/12' -# - '192.168.0.0/16' -# - '100.64.0.0/10' -# - '192.0.0.0/24' -# - '169.254.0.0/16' -# - '192.88.99.0/24' -# - '198.18.0.0/15' -# - '192.0.2.0/24' -# - '198.51.100.0/24' -# - '203.0.113.0/24' -# - '224.0.0.0/4' -# - '::1/128' -# - 'fe80::/10' -# - 'fc00::/7' -# - '2001:db8::/32' -# - 'ff00::/8' -# - 'fec0::/10' - -# List of IP address CIDR ranges that should be allowed for federation, -# identity servers, push servers, and for checking key validity for -# third-party invite events. This is useful for specifying exceptions to -# wide-ranging blacklisted target IP ranges - e.g. for communication with -# a push server only visible in your network. -# -# This whitelist overrides ip_range_blacklist and defaults to an empty -# list. -# -#ip_range_whitelist: -# - '192.168.1.1' - -# List of ports that Synapse should listen on, their purpose and their -# configuration. -# -# Options for each listener include: -# -# port: the TCP port to bind to -# -# bind_addresses: a list of local addresses to listen on. The default is -# 'all local interfaces'. -# -# type: the type of listener. Normally 'http', but other valid options are: -# 'manhole' (see docs/manhole.md), -# 'metrics' (see docs/metrics-howto.md), -# 'replication' (see docs/workers.md). -# -# tls: set to true to enable TLS for this listener. Will use the TLS -# key/cert specified in tls_private_key_path / tls_certificate_path. -# -# x_forwarded: Only valid for an 'http' listener. Set to true to use the -# X-Forwarded-For header as the client IP. Useful when Synapse is -# behind a reverse-proxy. 
-# -# resources: Only valid for an 'http' listener. A list of resources to host -# on this port. Options for each resource are: -# -# names: a list of names of HTTP resources. See below for a list of -# valid resource names. -# -# compress: set to true to enable HTTP compression for this resource. -# -# additional_resources: Only valid for an 'http' listener. A map of -# additional endpoints which should be loaded via dynamic modules. -# -# Valid resource names are: -# -# client: the client-server API (/_matrix/client), and the synapse admin -# API (/_synapse/admin). Also implies 'media' and 'static'. -# -# consent: user consent forms (/_matrix/consent). See -# docs/consent_tracking.md. -# -# federation: the server-server API (/_matrix/federation). Also implies -# 'media', 'keys', 'openid' -# -# keys: the key discovery API (/_matrix/keys). -# -# media: the media API (/_matrix/media). -# -# metrics: the metrics interface. See docs/metrics-howto.md. -# -# openid: OpenID authentication. -# -# replication: the HTTP replication API (/_synapse/replication). See -# docs/workers.md. -# -# static: static resources under synapse/static (/_matrix/static). (Mostly -# useful for 'fallback authentication'.) -# -# webclient: A web client. Requires web_client_location to be set. -# -listeners: - # TLS-enabled listener: for when matrix traffic is sent directly to synapse. - # - # Disabled by default. To enable it, uncomment the following. (Note that you - # will also need to give Synapse a TLS key and certificate: see the TLS section - # below.) - # - #- port: 8448 - # type: http - # tls: true - # resources: - # - names: [client, federation] - - # Unsecure HTTP listener: for when matrix traffic passes through a reverse proxy - # that unwraps TLS. - # - # If you plan to use a reverse proxy, please see - # https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md. - # - - port: 80 - tls: false - type: http - x_forwarded: true - - resources: - - names: [client, federation] - compress: false - - # example additional_resources: - # - #additional_resources: - # "/_matrix/my/custom/endpoint": - # module: my_module.CustomRequestHandler - # config: {} - - # Turn on the twisted ssh manhole service on localhost on the given - # port. - # - #- port: 9000 - # bind_addresses: ['::1', '127.0.0.1'] - # type: manhole - -# Forward extremities can build up in a room due to networking delays between -# homeservers. Once this happens in a large room, calculation of the state of -# that room can become quite expensive. To mitigate this, once the number of -# forward extremities reaches a given threshold, Synapse will send an -# org.matrix.dummy_event event, which will reduce the forward extremities -# in the room. -# -# This setting defines the threshold (i.e. number of forward extremities in the -# room) at which dummy events are sent. The default value is 10. -# -#dummy_events_threshold: 5 - - -## Homeserver blocking ## - -# How to reach the server admin, used in ResourceLimitError -# -#admin_contact: 'mailto:admin@server.com' - -# Global blocking -# -#hs_disabled: false -#hs_disabled_message: 'Human readable reason for why the HS is blocked' - -# Monthly Active User Blocking -# -# Used in cases where the admin or server owner wants to limit to the -# number of monthly active users. -# -# 'limit_usage_by_mau' disables/enables monthly active user blocking. 
When -# enabled and a limit is reached the server returns a 'ResourceLimitError' -# with error type Codes.RESOURCE_LIMIT_EXCEEDED -# -# 'max_mau_value' is the hard limit of monthly active users above which -# the server will start blocking user actions. -# -# 'mau_trial_days' is a means to add a grace period for active users. It -# means that users must be active for this number of days before they -# can be considered active and guards against the case where lots of users -# sign up in a short space of time never to return after their initial -# session. -# -# 'mau_limit_alerting' is a means of limiting client side alerting -# should the mau limit be reached. This is useful for small instances -# where the admin has 5 mau seats (say) for 5 specific people and no -# interest increasing the mau limit further. Defaults to True, which -# means that alerting is enabled -# -#limit_usage_by_mau: false -#max_mau_value: 50 -#mau_trial_days: 2 -#mau_limit_alerting: false - -# If enabled, the metrics for the number of monthly active users will -# be populated, however no one will be limited. If limit_usage_by_mau -# is true, this is implied to be true. -# -#mau_stats_only: false - -# Sometimes the server admin will want to ensure certain accounts are -# never blocked by mau checking. These accounts are specified here. -# -#mau_limit_reserved_threepids: -# - medium: 'email' -# address: 'reserved_user@example.com' - -# Used by phonehome stats to group together related servers. -#server_context: context - -# Resource-constrained homeserver settings -# -# When this is enabled, the room "complexity" will be checked before a user -# joins a new remote room. If it is above the complexity limit, the server will -# disallow joining, or will instantly leave. -# -# Room complexity is an arbitrary measure based on factors such as the number of -# users in the room. -# -limit_remote_rooms: - # Uncomment to enable room complexity checking. - # - #enabled: true - - # the limit above which rooms cannot be joined. The default is 1.0. - # - #complexity: 0.5 - - # override the error which is returned when the room is too complex. - # - #complexity_error: "This room is too complex." - - # allow server admins to join complex rooms. Default is false. - # - #admins_can_join: true - -# Whether to require a user to be in the room to add an alias to it. -# Defaults to 'true'. -# -#require_membership_for_aliases: false - -# Whether to allow per-room membership profiles through the send of membership -# events with profile information that differ from the target's global profile. -# Defaults to 'true'. -# -#allow_per_room_profiles: false - -# How long to keep redacted events in unredacted form in the database. After -# this period redacted events get replaced with their redacted form in the DB. -# -# Defaults to `7d`. Set to `null` to disable. -# -#redaction_retention_period: 28d - -# How long to track users' last seen time and IPs in the database. -# -# Defaults to `28d`. Set to `null` to disable clearing out of old rows. -# -#user_ips_max_age: 14d - -# Message retention policy at the server level. -# -# Room admins and mods can define a retention period for their rooms using the -# 'm.room.retention' state event, and server admins can cap this period by setting -# the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options. -# -# If this feature is enabled, Synapse will regularly look for and purge events -# which are older than the room's maximum retention period. 
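Pulling together the monthly-active-user and room-complexity options described in the comments above, a minimal uncommented sketch might look as follows; every value is reused from the example defaults in those comments and is illustrative only, not a recommendation.

```yaml
# Sketch only: MAU blocking and room-complexity limits, using the example
# values from the comments above.
limit_usage_by_mau: true
max_mau_value: 50
mau_trial_days: 2
mau_limit_alerting: false
mau_stats_only: false
mau_limit_reserved_threepids:
  - medium: 'email'
    address: 'reserved_user@example.com'

limit_remote_rooms:
  enabled: true
  complexity: 0.5
  complexity_error: "This room is too complex."
  admins_can_join: true
```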
Synapse will also -# filter events received over federation so that events that should have been -# purged are ignored and not stored again. -# -retention: - # The message retention policies feature is disabled by default. Uncomment the - # following line to enable it. - # - #enabled: true - - # Default retention policy. If set, Synapse will apply it to rooms that lack the - # 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't - # matter much because Synapse doesn't take it into account yet. - # - #default_policy: - # min_lifetime: 1d - # max_lifetime: 1y - - # Retention policy limits. If set, and the state of a room contains a - # 'm.room.retention' event in its state which contains a 'min_lifetime' or a - # 'max_lifetime' that's out of these bounds, Synapse will cap the room's policy - # to these limits when running purge jobs. - # - #allowed_lifetime_min: 1d - #allowed_lifetime_max: 1y - - # Server admins can define the settings of the background jobs purging the - # events which lifetime has expired under the 'purge_jobs' section. - # - # If no configuration is provided, a single job will be set up to delete expired - # events in every room daily. - # - # Each job's configuration defines which range of message lifetimes the job - # takes care of. For example, if 'shortest_max_lifetime' is '2d' and - # 'longest_max_lifetime' is '3d', the job will handle purging expired events in - # rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and - # lower than or equal to 3 days. Both the minimum and the maximum value of a - # range are optional, e.g. a job with no 'shortest_max_lifetime' and a - # 'longest_max_lifetime' of '3d' will handle every room with a retention policy - # which 'max_lifetime' is lower than or equal to three days. - # - # The rationale for this per-job configuration is that some rooms might have a - # retention policy with a low 'max_lifetime', where history needs to be purged - # of outdated messages on a more frequent basis than for the rest of the rooms - # (e.g. every 12h), but not want that purge to be performed by a job that's - # iterating over every room it knows, which could be heavy on the server. - # - # If any purge job is configured, it is strongly recommended to have at least - # a single job with neither 'shortest_max_lifetime' nor 'longest_max_lifetime' - # set, or one job without 'shortest_max_lifetime' and one job without - # 'longest_max_lifetime' set. Otherwise some rooms might be ignored, even if - # 'allowed_lifetime_min' and 'allowed_lifetime_max' are set, because capping a - # room's policy to these values is done after the policies are retrieved from - # Synapse's database (which is done using the range specified in a purge job's - # configuration). - # - #purge_jobs: - # - longest_max_lifetime: 3d - # interval: 12h - # - shortest_max_lifetime: 3d - # interval: 1d - -# Inhibits the /requestToken endpoints from returning an error that might leak -# information about whether an e-mail address is in use or not on this -# homeserver. -# Note that for some endpoints the error situation is the e-mail already being -# used, and for others the error is entering the e-mail being unused. -# If this option is enabled, instead of returning an error, these endpoints will -# act as if no error happened and return a fake session ID ('sid') to clients. -# -#request_token_inhibit_3pid_errors: true - -# A list of domains that the domain portion of 'next_link' parameters -# must match. 
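As a consolidated sketch of the retention options discussed above, using the durations from the commented examples (adjust to your own policy before enabling anything):

```yaml
# Sketch only: server-level message retention, values taken from the
# commented examples above.
retention:
  enabled: true
  default_policy:
    min_lifetime: 1d
    max_lifetime: 1y
  allowed_lifetime_min: 1d
  allowed_lifetime_max: 1y
  purge_jobs:
    # one job without 'shortest_max_lifetime' and one without
    # 'longest_max_lifetime', as recommended above
    - longest_max_lifetime: 3d
      interval: 12h
    - shortest_max_lifetime: 3d
      interval: 1d
```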
-# -# This parameter is optionally provided by clients while requesting -# validation of an email or phone number, and maps to a link that -# users will be automatically redirected to after validation -# succeeds. Clients can make use this parameter to aid the validation -# process. -# -# The whitelist is applied whether the homeserver or an -# identity server is handling validation. -# -# The default value is no whitelist functionality; all domains are -# allowed. Setting this value to an empty list will instead disallow -# all domains. -# -#next_link_domain_whitelist: ["matrix.org"] - - -## TLS ## - -# PEM-encoded X509 certificate for TLS. -# This certificate, as of Synapse 1.0, will need to be a valid and verifiable -# certificate, signed by a recognised Certificate Authority. -# -# See 'ACME support' below to enable auto-provisioning this certificate via -# Let's Encrypt. -# -# If supplying your own, be sure to use a `.pem` file that includes the -# full certificate chain including any intermediate certificates (for -# instance, if using certbot, use `fullchain.pem` as your certificate, -# not `cert.pem`). -# -#tls_certificate_path: "/data/synapse.tls.crt" - -# PEM-encoded private key for TLS -# -#tls_private_key_path: "/data/synapse.tls.key" - -# Whether to verify TLS server certificates for outbound federation requests. -# -# Defaults to `true`. To disable certificate verification, uncomment the -# following line. -# -#federation_verify_certificates: false - -# The minimum TLS version that will be used for outbound federation requests. -# -# Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note -# that setting this value higher than `1.2` will prevent federation to most -# of the public Matrix network: only configure it to `1.3` if you have an -# entirely private federation setup and you can ensure TLS 1.3 support. -# -#federation_client_minimum_tls_version: 1.2 - -# Skip federation certificate verification on the following whitelist -# of domains. -# -# This setting should only be used in very specific cases, such as -# federation over Tor hidden services and similar. For private networks -# of homeservers, you likely want to use a private CA instead. -# -# Only effective if federation_verify_certicates is `true`. -# -#federation_certificate_verification_whitelist: -# - lon.example.com -# - *.domain.com -# - *.onion - -# List of custom certificate authorities for federation traffic. -# -# This setting should only normally be used within a private network of -# homeservers. -# -# Note that this list will replace those that are provided by your -# operating environment. Certificates must be in PEM format. -# -#federation_custom_ca_list: -# - myCA1.pem -# - myCA2.pem -# - myCA3.pem - -# ACME support: This will configure Synapse to request a valid TLS certificate -# for your configured `server_name` via Let's Encrypt. -# -# Note that ACME v1 is now deprecated, and Synapse currently doesn't support -# ACME v2. This means that this feature currently won't work with installs set -# up after November 2019. For more info, and alternative solutions, see -# https://github.com/matrix-org/synapse/blob/master/docs/ACME.md#deprecation-of-acme-v1 -# -# Note that provisioning a certificate in this way requires port 80 to be -# routed to Synapse so that it can complete the http-01 ACME challenge. 
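A minimal sketch of the TLS options described above, assuming a certbot-issued certificate; the /data paths are assumptions for illustration and are not part of the original sample config.

```yaml
# Sketch only: point Synapse at a full-chain certificate, as advised above.
tls_certificate_path: "/data/fullchain.pem"   # assumed certbot output path
tls_private_key_path: "/data/privkey.pem"     # assumed certbot output path
federation_verify_certificates: true
federation_client_minimum_tls_version: 1.2
```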
-# By default, if you enable ACME support, Synapse will attempt to listen on -# port 80 for incoming http-01 challenges - however, this will likely fail -# with 'Permission denied' or a similar error. -# -# There are a couple of potential solutions to this: -# -# * If you already have an Apache, Nginx, or similar listening on port 80, -# you can configure Synapse to use an alternate port, and have your web -# server forward the requests. For example, assuming you set 'port: 8009' -# below, on Apache, you would write: -# -# ProxyPass /.well-known/acme-challenge http://localhost:8009/.well-known/acme-challenge -# -# * Alternatively, you can use something like `authbind` to give Synapse -# permission to listen on port 80. -# -acme: - # ACME support is disabled by default. Set this to `true` and uncomment - # tls_certificate_path and tls_private_key_path above to enable it. - # - enabled: false - - # Endpoint to use to request certificates. If you only want to test, - # use Let's Encrypt's staging url: - # https://acme-staging.api.letsencrypt.org/directory - # - #url: https://acme-v01.api.letsencrypt.org/directory - - # Port number to listen on for the HTTP-01 challenge. Change this if - # you are forwarding connections through Apache/Nginx/etc. - # - port: 80 - - # Local addresses to listen on for incoming connections. - # Again, you may want to change this if you are forwarding connections - # through Apache/Nginx/etc. - # - bind_addresses: ['::', '0.0.0.0'] - - # How many days remaining on a certificate before it is renewed. - # - reprovision_threshold: 30 - - # The domain that the certificate should be for. Normally this - # should be the same as your Matrix domain (i.e., 'server_name'), but, - # by putting a file at 'https:///.well-known/matrix/server', - # you can delegate incoming traffic to another server. If you do that, - # you should give the target of the delegation here. - # - # For example: if your 'server_name' is 'example.com', but - # 'https://example.com/.well-known/matrix/server' delegates to - # 'matrix.example.com', you should put 'matrix.example.com' here. - # - # If not set, defaults to your 'server_name'. - # - domain: matrix.example.com - - # file to use for the account key. This will be generated if it doesn't - # exist. - # - # If unspecified, we will use CONFDIR/client.key. - # - account_key_file: /srv/acme_account.key - - -## Federation ## - -# Restrict federation to the following whitelist of domains. -# N.B. we recommend also firewalling your federation listener to limit -# inbound federation traffic as early as possible, rather than relying -# purely on this application-layer restriction. If not specified, the -# default is to whitelist everything. -# -#federation_domain_whitelist: -# - lon.example.com -# - nyc.example.com -# - syd.example.com - -# Report prometheus metrics on the age of PDUs being sent to and received from -# the following domains. This can be used to give an idea of "delay" on inbound -# and outbound federation, though be aware that any delay can be due to problems -# at either end or with the intermediate network. -# -# By default, no domains are monitored in this way. -# -#federation_metrics_domains: -# - matrix.org -# - example.com - -# Uncomment to disable profile lookup over federation. By default, the -# Federation API allows other homeservers to obtain profile data of any user -# on this homeserver. Defaults to 'true'. -# -#allow_profile_lookup_over_federation: false - -# Uncomment to disable device display name lookup over federation. 
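For the federation options above, a hedged sketch that combines the commented examples; the domain names are the placeholders from those comments, not real deployments.

```yaml
# Sketch only: restrict federation and monitor PDU age for a few domains,
# reusing the example hostnames from the comments above.
federation_domain_whitelist:
  - lon.example.com
  - nyc.example.com
  - syd.example.com

federation_metrics_domains:
  - matrix.org
  - example.com

allow_profile_lookup_over_federation: false
```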
By default, the -# Federation API allows other homeservers to obtain device display names of any user -# on this homeserver. Defaults to 'true'. -# -#allow_device_name_lookup_over_federation: false - - -## Caching ## - -# Caching can be configured through the following options. -# -# A cache 'factor' is a multiplier that can be applied to each of -# Synapse's caches in order to increase or decrease the maximum -# number of entries that can be stored. - -# The number of events to cache in memory. Not affected by -# caches.global_factor. -# -#event_cache_size: 10K - -caches: - # Controls the global cache factor, which is the default cache factor - # for all caches if a specific factor for that cache is not otherwise - # set. - # - # This can also be set by the "SYNAPSE_CACHE_FACTOR" environment - # variable. Setting by environment variable takes priority over - # setting through the config file. - # - # Defaults to 0.5, which will half the size of all caches. - # - #global_factor: 1.0 - - # A dictionary of cache name to cache factor for that individual - # cache. Overrides the global cache factor for a given cache. - # - # These can also be set through environment variables comprised - # of "SYNAPSE_CACHE_FACTOR_" + the name of the cache in capital - # letters and underscores. Setting by environment variable - # takes priority over setting through the config file. - # Ex. SYNAPSE_CACHE_FACTOR_GET_USERS_WHO_SHARE_ROOM_WITH_USER=2.0 - # - # Some caches have '*' and other characters that are not - # alphanumeric or underscores. These caches can be named with or - # without the special characters stripped. For example, to specify - # the cache factor for `*stateGroupCache*` via an environment - # variable would be `SYNAPSE_CACHE_FACTOR_STATEGROUPCACHE=2.0`. - # - per_cache_factors: - #get_users_who_share_room_with_user: 2.0 - - -## Database ## - -# The 'database' setting defines the database that synapse uses to store all of -# its data. -# -# 'name' gives the database engine to use: either 'sqlite3' (for SQLite) or -# 'psycopg2' (for PostgreSQL). -# -# 'args' gives options which are passed through to the database engine, -# except for options starting 'cp_', which are used to configure the Twisted -# connection pool. For a reference to valid arguments, see: -# * for sqlite: https://docs.python.org/3/library/sqlite3.html#sqlite3.connect -# * for postgres: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS -# * for the connection pool: https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__ -# -# -# Example SQLite configuration: -# -#database: -# name: sqlite3 -# args: -# database: /path/to/homeserver.db -# -# -# Example Postgres configuration: -# -#database: -# name: psycopg2 -# args: -# user: synapse_user -# password: secretpassword -# database: synapse -# host: localhost -# port: 5432 -# cp_min: 5 -# cp_max: 10 -# -# For more information on using Synapse with Postgres, see `docs/postgres.md`. -# -database: - name: sqlite3 - args: - database: /srv/homeserver.db - - -## Logging ## - -# A yaml python logging config file as described by -# https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema -# -log_config: "/srv/synapse.log.config" - - -## Ratelimiting ## - -# Ratelimiting settings for client actions (registration, login, messaging). -# -# Each ratelimiting configuration is made of two parameters: -# - per_second: number of requests a client can send per second. 
-# - burst_count: number of requests a client can send before being throttled. -# -# Synapse currently uses the following configurations: -# - one for messages that ratelimits sending based on the account the client -# is using -# - one for registration that ratelimits registration requests based on the -# client's IP address. -# - one for login that ratelimits login requests based on the client's IP -# address. -# - one for login that ratelimits login requests based on the account the -# client is attempting to log into. -# - one for login that ratelimits login requests based on the account the -# client is attempting to log into, based on the amount of failed login -# attempts for this account. -# - one for ratelimiting redactions by room admins. If this is not explicitly -# set then it uses the same ratelimiting as per rc_message. This is useful -# to allow room admins to deal with abuse quickly. -# - two for ratelimiting number of rooms a user can join, "local" for when -# users are joining rooms the server is already in (this is cheap) vs -# "remote" for when users are trying to join rooms not on the server (which -# can be more expensive) -# - one for ratelimiting how often a user or IP can attempt to validate a 3PID. -# - two for ratelimiting how often invites can be sent in a room or to a -# specific user. -# -# The defaults are as shown below. -# -#rc_message: -# per_second: 0.2 -# burst_count: 10 -# -#rc_registration: -# per_second: 0.17 -# burst_count: 3 -# -#rc_login: -# address: -# per_second: 0.17 -# burst_count: 3 -# account: -# per_second: 0.17 -# burst_count: 3 -# failed_attempts: -# per_second: 0.17 -# burst_count: 3 -# -#rc_admin_redaction: -# per_second: 1 -# burst_count: 50 -# -#rc_joins: -# local: -# per_second: 0.1 -# burst_count: 10 -# remote: -# per_second: 0.01 -# burst_count: 10 -# -#rc_3pid_validation: -# per_second: 0.003 -# burst_count: 5 -# -#rc_invites: -# per_room: -# per_second: 0.3 -# burst_count: 10 -# per_user: -# per_second: 0.003 -# burst_count: 5 - -# Ratelimiting settings for incoming federation -# -# The rc_federation configuration is made up of the following settings: -# - window_size: window size in milliseconds -# - sleep_limit: number of federation requests from a single server in -# a window before the server will delay processing the request. -# - sleep_delay: duration in milliseconds to delay processing events -# from remote servers by if they go over the sleep limit. -# - reject_limit: maximum number of concurrent federation requests -# allowed from a single server -# - concurrent: number of federation requests to concurrently process -# from a single server -# -# The defaults are as shown below. -# -#rc_federation: -# window_size: 1000 -# sleep_limit: 10 -# sleep_delay: 500 -# reject_limit: 50 -# concurrent: 3 - -# Target outgoing federation transaction frequency for sending read-receipts, -# per-room. -# -# If we end up trying to send out more read-receipts, they will get buffered up -# into fewer transactions. -# -#federation_rr_transactions_per_room_per_second: 50 - - - -## Media Store ## - -# Enable the media store service in the Synapse master. Uncomment the -# following if you are using a separate media store worker. -# -#enable_media_repo: false - -# Directory where uploaded images and attachments are stored. -# -media_store_path: "/srv/media_store" - -# Media storage providers allow media to be stored in different -# locations. 
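If the defaults listed above need adjusting, an override might look like the sketch below; the rc_message numbers are arbitrary illustrations (assumptions), while the rc_invites values simply restate the documented defaults.

```yaml
# Sketch only: per-action ratelimit overrides.
rc_message:
  per_second: 0.5    # assumed value for illustration
  burst_count: 20    # assumed value for illustration

rc_invites:
  per_room:
    per_second: 0.3
    burst_count: 10
  per_user:
    per_second: 0.003
    burst_count: 5
```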
-# -#media_storage_providers: -# - module: file_system -# # Whether to store newly uploaded local files -# store_local: false -# # Whether to store newly downloaded remote files -# store_remote: false -# # Whether to wait for successful storage for local uploads -# store_synchronous: false -# config: -# directory: /mnt/some/other/directory - -# The largest allowed upload size in bytes -# -#max_upload_size: 50M - -# Maximum number of pixels that will be thumbnailed -# -#max_image_pixels: 32M - -# Whether to generate new thumbnails on the fly to precisely match -# the resolution requested by the client. If true then whenever -# a new resolution is requested by the client the server will -# generate a new thumbnail. If false the server will pick a thumbnail -# from a precalculated list. -# -#dynamic_thumbnails: false - -# List of thumbnails to precalculate when an image is uploaded. -# -#thumbnail_sizes: -# - width: 32 -# height: 32 -# method: crop -# - width: 96 -# height: 96 -# method: crop -# - width: 320 -# height: 240 -# method: scale -# - width: 640 -# height: 480 -# method: scale -# - width: 800 -# height: 600 -# method: scale - -# Is the preview URL API enabled? -# -# 'false' by default: uncomment the following to enable it (and specify a -# url_preview_ip_range_blacklist blacklist). -# -#url_preview_enabled: true - -# List of IP address CIDR ranges that the URL preview spider is denied -# from accessing. There are no defaults: you must explicitly -# specify a list for URL previewing to work. You should specify any -# internal services in your network that you do not want synapse to try -# to connect to, otherwise anyone in any Matrix room could cause your -# synapse to issue arbitrary GET requests to your internal services, -# causing serious security issues. -# -# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly -# listed here, since they correspond to unroutable addresses.) -# -# This must be specified if url_preview_enabled is set. It is recommended that -# you uncomment the following list as a starting point. -# -#url_preview_ip_range_blacklist: -# - '127.0.0.0/8' -# - '10.0.0.0/8' -# - '172.16.0.0/12' -# - '192.168.0.0/16' -# - '100.64.0.0/10' -# - '192.0.0.0/24' -# - '169.254.0.0/16' -# - '192.88.99.0/24' -# - '198.18.0.0/15' -# - '192.0.2.0/24' -# - '198.51.100.0/24' -# - '203.0.113.0/24' -# - '224.0.0.0/4' -# - '::1/128' -# - 'fe80::/10' -# - 'fc00::/7' -# - '2001:db8::/32' -# - 'ff00::/8' -# - 'fec0::/10' - -# List of IP address CIDR ranges that the URL preview spider is allowed -# to access even if they are specified in url_preview_ip_range_blacklist. -# This is useful for specifying exceptions to wide-ranging blacklisted -# target IP ranges - e.g. for enabling URL previews for a specific private -# website only visible in your network. -# -#url_preview_ip_range_whitelist: -# - '192.168.1.1' - -# Optional list of URL matches that the URL preview spider is -# denied from accessing. You should use url_preview_ip_range_blacklist -# in preference to this, otherwise someone could define a public DNS -# entry that points to a private IP address and circumvent the blacklist. -# This is more useful if you know there is an entire shape of URL that -# you know that will never want synapse to try to spider. -# -# Each list entry is a dictionary of url component attributes as returned -# by urlparse.urlsplit as applied to the absolute form of the URL. 
See -# https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit -# The values of the dictionary are treated as an filename match pattern -# applied to that component of URLs, unless they start with a ^ in which -# case they are treated as a regular expression match. If all the -# specified component matches for a given list item succeed, the URL is -# blacklisted. -# -#url_preview_url_blacklist: -# # blacklist any URL with a username in its URI -# - username: '*' -# -# # blacklist all *.google.com URLs -# - netloc: 'google.com' -# - netloc: '*.google.com' -# -# # blacklist all plain HTTP URLs -# - scheme: 'http' -# -# # blacklist http(s)://www.acme.com/foo -# - netloc: 'www.acme.com' -# path: '/foo' -# -# # blacklist any URL with a literal IPv4 address -# - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$' - -# The largest allowed URL preview spidering size in bytes -# -#max_spider_size: 10M - -# A list of values for the Accept-Language HTTP header used when -# downloading webpages during URL preview generation. This allows -# Synapse to specify the preferred languages that URL previews should -# be in when communicating with remote servers. -# -# Each value is a IETF language tag; a 2-3 letter identifier for a -# language, optionally followed by subtags separated by '-', specifying -# a country or region variant. -# -# Multiple values can be provided, and a weight can be added to each by -# using quality value syntax (;q=). '*' translates to any language. -# -# Defaults to "en". -# -# Example: -# -# url_preview_accept_language: -# - en-UK -# - en-US;q=0.9 -# - fr;q=0.8 -# - *;q=0.7 -# -url_preview_accept_language: -# - en - - -## Captcha ## -# See docs/CAPTCHA_SETUP.md for full details of configuring this. - -# This homeserver's ReCAPTCHA public key. Must be specified if -# enable_registration_captcha is enabled. -# -#recaptcha_public_key: "YOUR_PUBLIC_KEY" - -# This homeserver's ReCAPTCHA private key. Must be specified if -# enable_registration_captcha is enabled. -# -#recaptcha_private_key: "YOUR_PRIVATE_KEY" - -# Uncomment to enable ReCaptcha checks when registering, preventing signup -# unless a captcha is answered. Requires a valid ReCaptcha -# public/private key. Defaults to 'false'. -# -#enable_registration_captcha: true - -# The API endpoint to use for verifying m.login.recaptcha responses. -# Defaults to "https://www.recaptcha.net/recaptcha/api/siteverify". -# -#recaptcha_siteverify_api: "https://my.recaptcha.site" - - -## TURN ## - -# The public URIs of the TURN server to give to clients -# -#turn_uris: [] - -# The shared secret used to compute passwords for the TURN server -# -#turn_shared_secret: "YOUR_SHARED_SECRET" - -# The Username and password if the TURN server needs them and -# does not use a token -# -#turn_username: "TURNSERVER_USERNAME" -#turn_password: "TURNSERVER_PASSWORD" - -# How long generated TURN credentials last -# -#turn_user_lifetime: 1h - -# Whether guests should be allowed to use the TURN server. -# This defaults to True, otherwise VoIP will be unreliable for guests. -# However, it does introduce a slight security risk as it allows users to -# connect to arbitrary endpoints without having first signed up for a -# valid account (e.g. by passing a CAPTCHA). -# -#turn_allow_guests: true - - -## Registration ## -# -# Registration can be rate-limited using the parameters in the "Ratelimiting" -# section of this file. - -# Enable registration for new users. -# -#enable_registration: false - -# Time that a user's session remains valid for, after they log in. 
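Tying together the CAPTCHA and TURN sections above, a sketch with placeholder credentials; the TURN URIs are assumptions, while the other values mirror the commented examples.

```yaml
# Sketch only: ReCAPTCHA-gated registration and a TURN server for VoIP.
recaptcha_public_key: "YOUR_PUBLIC_KEY"
recaptcha_private_key: "YOUR_PRIVATE_KEY"
enable_registration_captcha: true

turn_uris:
  - "turn:turn.example.com:3478?transport=udp"   # assumed TURN server
  - "turn:turn.example.com:3478?transport=tcp"   # assumed TURN server
turn_shared_secret: "YOUR_SHARED_SECRET"
turn_user_lifetime: 1h
turn_allow_guests: true
```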
-# -# Note that this is not currently compatible with guest logins. -# -# Note also that this is calculated at login time: changes are not applied -# retrospectively to users who have already logged in. -# -# By default, this is infinite. -# -#session_lifetime: 24h - -# The user must provide all of the below types of 3PID when registering. -# -#registrations_require_3pid: -# - email -# - msisdn - -# Explicitly disable asking for MSISDNs from the registration -# flow (overrides registrations_require_3pid if MSISDNs are set as required) -# -#disable_msisdn_registration: true - -# Mandate that users are only allowed to associate certain formats of -# 3PIDs with accounts on this server. -# -#allowed_local_3pids: -# - medium: email -# pattern: '^[^@]+@matrix\.org$' -# - medium: email -# pattern: '^[^@]+@vector\.im$' -# - medium: msisdn -# pattern: '\+44' - -# Enable 3PIDs lookup requests to identity servers from this server. -# -#enable_3pid_lookup: true - -# If set, allows registration of standard or admin accounts by anyone who -# has the shared secret, even if registration is otherwise disabled. -# -registration_shared_secret: "P_P.1Rn*Uh;2fOwJto=+FRixCws.i-k2uHle#Fhk-JBPXYS5_v" - -# Set the number of bcrypt rounds used to generate password hash. -# Larger numbers increase the work factor needed to generate the hash. -# The default number is 12 (which equates to 2^12 rounds). -# N.B. that increasing this will exponentially increase the time required -# to register or login - e.g. 24 => 2^24 rounds which will take >20 mins. -# -#bcrypt_rounds: 12 - -# Allows users to register as guests without a password/email/etc, and -# participate in rooms hosted on this server which have been made -# accessible to anonymous users. -# -#allow_guest_access: false - -# The identity server which we suggest that clients should use when users log -# in on this server. -# -# (By default, no suggestion is made, so it is left up to the client. -# This setting is ignored unless public_baseurl is also set.) -# -#default_identity_server: https://matrix.org - -# Handle threepid (email/phone etc) registration and password resets through a set of -# *trusted* identity servers. Note that this allows the configured identity server to -# reset passwords for accounts! -# -# Be aware that if `email` is not set, and SMTP options have not been -# configured in the email config block, registration and user password resets via -# email will be globally disabled. -# -# Additionally, if `msisdn` is not set, registration and password resets via msisdn -# will be disabled regardless, and users will not be able to associate an msisdn -# identifier to their account. This is due to Synapse currently not supporting -# any method of sending SMS messages on its own. -# -# To enable using an identity server for operations regarding a particular third-party -# identifier type, set the value to the URL of that identity server as shown in the -# examples below. -# -# Servers handling the these requests must answer the `/requestToken` endpoints defined -# by the Matrix Identity Service API specification: -# https://matrix.org/docs/spec/identity_service/latest -# -# If a delegate is specified, the config option public_baseurl must also be filled out. -# -account_threepid_delegates: - #email: https://example.com # Delegate email sending to example.com - #msisdn: http://localhost:8090 # Delegate SMS sending to this local process - -# Whether users are allowed to change their displayname after it has -# been initially set. 
Useful when provisioning users based on the -# contents of a third-party directory. -# -# Does not apply to server administrators. Defaults to 'true' -# -#enable_set_displayname: false - -# Whether users are allowed to change their avatar after it has been -# initially set. Useful when provisioning users based on the contents -# of a third-party directory. -# -# Does not apply to server administrators. Defaults to 'true' -# -#enable_set_avatar_url: false - -# Whether users can change the 3PIDs associated with their accounts -# (email address and msisdn). -# -# Defaults to 'true' -# -#enable_3pid_changes: false - -# Users who register on this homeserver will automatically be joined -# to these rooms. -# -# By default, any room aliases included in this list will be created -# as a publicly joinable room when the first user registers for the -# homeserver. This behaviour can be customised with the settings below. -# If the room already exists, make certain it is a publicly joinable -# room. The join rule of the room must be set to 'public'. -# -#auto_join_rooms: -# - "#example:example.com" - -# Where auto_join_rooms are specified, setting this flag ensures that the -# the rooms exist by creating them when the first user on the -# homeserver registers. -# -# By default the auto-created rooms are publicly joinable from any federated -# server. Use the autocreate_auto_join_rooms_federated and -# autocreate_auto_join_room_preset settings below to customise this behaviour. -# -# Setting to false means that if the rooms are not manually created, -# users cannot be auto-joined since they do not exist. -# -# Defaults to true. Uncomment the following line to disable automatically -# creating auto-join rooms. -# -#autocreate_auto_join_rooms: false - -# Whether the auto_join_rooms that are auto-created are available via -# federation. Only has an effect if autocreate_auto_join_rooms is true. -# -# Note that whether a room is federated cannot be modified after -# creation. -# -# Defaults to true: the room will be joinable from other servers. -# Uncomment the following to prevent users from other homeservers from -# joining these rooms. -# -#autocreate_auto_join_rooms_federated: false - -# The room preset to use when auto-creating one of auto_join_rooms. Only has an -# effect if autocreate_auto_join_rooms is true. -# -# This can be one of "public_chat", "private_chat", or "trusted_private_chat". -# If a value of "private_chat" or "trusted_private_chat" is used then -# auto_join_mxid_localpart must also be configured. -# -# Defaults to "public_chat", meaning that the room is joinable by anyone, including -# federated servers if autocreate_auto_join_rooms_federated is true (the default). -# Uncomment the following to require an invitation to join these rooms. -# -#autocreate_auto_join_room_preset: private_chat - -# The local part of the user id which is used to create auto_join_rooms if -# autocreate_auto_join_rooms is true. If this is not provided then the -# initial user account that registers will be used to create the rooms. -# -# The user id is also used to invite new users to any auto-join rooms which -# are set to invite-only. -# -# It *must* be configured if autocreate_auto_join_room_preset is set to -# "private_chat" or "trusted_private_chat". -# -# Note that this must be specified in order for new users to be correctly -# invited to any auto-join rooms which have been set to invite-only (either -# at the time of creation or subsequently). 
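A consolidated sketch of the auto-join options above, reusing the example room alias and the 'system' localpart given for auto_join_mxid_localpart; as noted above, the localpart is required because the preset is not public_chat.

```yaml
# Sketch only: invite-only auto-join rooms created on first registration.
auto_join_rooms:
  - "#example:example.com"
autocreate_auto_join_rooms: true
autocreate_auto_join_rooms_federated: false
autocreate_auto_join_room_preset: private_chat
auto_join_mxid_localpart: system
```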
-# -# Note that, if the room already exists, this user must be joined and -# have the appropriate permissions to invite new members. -# -#auto_join_mxid_localpart: system - -# When auto_join_rooms is specified, setting this flag to false prevents -# guest accounts from being automatically joined to the rooms. -# -# Defaults to true. -# -#auto_join_rooms_for_guests: false - - -## Account Validity ## - -# Optional account validity configuration. This allows for accounts to be denied -# any request after a given period. -# -# Once this feature is enabled, Synapse will look for registered users without an -# expiration date at startup and will add one to every account it found using the -# current settings at that time. -# This means that, if a validity period is set, and Synapse is restarted (it will -# then derive an expiration date from the current validity period), and some time -# after that the validity period changes and Synapse is restarted, the users' -# expiration dates won't be updated unless their account is manually renewed. This -# date will be randomly selected within a range [now + period - d ; now + period], -# where d is equal to 10% of the validity period. -# -account_validity: - # The account validity feature is disabled by default. Uncomment the - # following line to enable it. - # - #enabled: true - - # The period after which an account is valid after its registration. When - # renewing the account, its validity period will be extended by this amount - # of time. This parameter is required when using the account validity - # feature. - # - #period: 6w - - # The amount of time before an account's expiry date at which Synapse will - # send an email to the account's email address with a renewal link. By - # default, no such emails are sent. - # - # If you enable this setting, you will also need to fill out the 'email' and - # 'public_baseurl' configuration sections. - # - #renew_at: 1w - - # The subject of the email sent out with the renewal link. '%(app)s' can be - # used as a placeholder for the 'app_name' parameter from the 'email' - # section. - # - # Note that the placeholder must be written '%(app)s', including the - # trailing 's'. - # - # If this is not set, a default value is used. - # - #renew_email_subject: "Renew your %(app)s account" - - # Directory in which Synapse will try to find templates for the HTML files to - # serve to the user when trying to renew an account. If not set, default - # templates from within the Synapse package will be used. - # - # The currently available templates are: - # - # * account_renewed.html: Displayed to the user after they have successfully - # renewed their account. - # - # * account_previously_renewed.html: Displayed to the user if they attempt to - # renew their account with a token that is valid, but that has already - # been used. In this case the account is not renewed again. - # - # * invalid_token.html: Displayed to the user when they try to renew an account - # with an unknown or invalid renewal token. - # - # See https://github.com/matrix-org/synapse/tree/master/synapse/res/templates for - # default template contents. - # - # The file name of some of these templates can be configured below for legacy - # reasons. - # - #template_dir: "res/templates" - - # A custom file name for the 'account_renewed.html' template. - # - # If not set, the file is assumed to be named "account_renewed.html". - # - #account_renewed_html_path: "account_renewed.html" - - # A custom file name for the 'invalid_token.html' template. 
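Bringing the account-validity options above together, a sketch that uses the example period and renewal window from those comments; enabling renew_at also requires the 'email' and 'public_baseurl' settings mentioned above.

```yaml
# Sketch only: expire accounts after 6 weeks unless renewed.
account_validity:
  enabled: true
  period: 6w
  renew_at: 1w
  renew_email_subject: "Renew your %(app)s account"
  #template_dir: "res/templates"   # optional, as described above
```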
- # - # If not set, the file is assumed to be named "invalid_token.html". - # - #invalid_token_html_path: "invalid_token.html" - - -## Metrics ### - -# Enable collection and rendering of performance metrics -# -#enable_metrics: false - -# Enable sentry integration -# NOTE: While attempts are made to ensure that the logs don't contain -# any sensitive information, this cannot be guaranteed. By enabling -# this option the sentry server may therefore receive sensitive -# information, and it in turn may then diseminate sensitive information -# through insecure notification channels if so configured. -# -#sentry: -# dsn: "..." - -# Flags to enable Prometheus metrics which are not suitable to be -# enabled by default, either for performance reasons or limited use. -# -metrics_flags: - # Publish synapse_federation_known_servers, a gauge of the number of - # servers this homeserver knows about, including itself. May cause - # performance problems on large homeservers. - # - #known_servers: true - -# Whether or not to report anonymized homeserver usage statistics. -# -report_stats: false - -# The endpoint to report the anonymized homeserver usage statistics to. -# Defaults to https://matrix.org/report-usage-stats/push -# -#report_stats_endpoint: https://example.com/report-usage-stats/push - - -## API Configuration ## - -# Controls for the state that is shared with users who receive an invite -# to a room -# -room_prejoin_state: - # By default, the following state event types are shared with users who - # receive invites to the room: - # - # - m.room.join_rules - # - m.room.canonical_alias - # - m.room.avatar - # - m.room.encryption - # - m.room.name - # - m.room.create - # - # Uncomment the following to disable these defaults (so that only the event - # types listed in 'additional_event_types' are shared). Defaults to 'false'. - # - #disable_default_event_types: true - - # Additional state event types to share with users when they are invited - # to a room. - # - # By default, this list is empty (so only the default event types are shared). - # - #additional_event_types: - # - org.example.custom.event.type - - -# A list of application service config files to use -# -#app_service_config_files: -# - app_service_1.yaml -# - app_service_2.yaml - -# Uncomment to enable tracking of application service IP addresses. Implicitly -# enables MAU tracking for application service users. -# -#track_appservice_user_ips: true - - -# a secret which is used to sign access tokens. If none is specified, -# the registration_shared_secret is used, if one is given; otherwise, -# a secret key is derived from the signing key. -# -macaroon_secret_key: "#fcFaqplV,^c5:CmYAfKdEGcHqZ7YKSuS&Gq0DFw3BO@crX;pr" - -# a secret which is used to calculate HMACs for form values, to stop -# falsification of values. Must be specified for the User Consent -# forms to work. -# -form_secret: "yLW&wmKAN+7IOFfRkS5MnSOuHa2ur7&N~NJOqe46PRwiI*s.#_" - -## Signing Keys ## - -# Path to the signing key to sign messages with -# -signing_key_path: "/srv/synapse.signing.key" - -# The keys that the server used to sign messages with but won't use -# to sign new messages. -# -old_signing_keys: - # For each key, `key` should be the base64-encoded public key, and - # `expired_ts`should be the time (in milliseconds since the unix epoch) that - # it was last used. - # - # It is possible to build an entry from an old signing.key file using the - # `export_signing_key` script which is provided with synapse. 
- # - # For example: - # - #"ed25519:id": { key: "base64string", expired_ts: 123456789123 } - -# How long key response published by this server is valid for. -# Used to set the valid_until_ts in /key/v2 APIs. -# Determines how quickly servers will query to check which keys -# are still valid. -# -#key_refresh_interval: 1d - -# The trusted servers to download signing keys from. -# -# When we need to fetch a signing key, each server is tried in parallel. -# -# Normally, the connection to the key server is validated via TLS certificates. -# Additional security can be provided by configuring a `verify key`, which -# will make synapse check that the response is signed by that key. -# -# This setting supercedes an older setting named `perspectives`. The old format -# is still supported for backwards-compatibility, but it is deprecated. -# -# 'trusted_key_servers' defaults to matrix.org, but using it will generate a -# warning on start-up. To suppress this warning, set -# 'suppress_key_server_warning' to true. -# -# Options for each entry in the list include: -# -# server_name: the name of the server. required. -# -# verify_keys: an optional map from key id to base64-encoded public key. -# If specified, we will check that the response is signed by at least -# one of the given keys. -# -# accept_keys_insecurely: a boolean. Normally, if `verify_keys` is unset, -# and federation_verify_certificates is not `true`, synapse will refuse -# to start, because this would allow anyone who can spoof DNS responses -# to masquerade as the trusted key server. If you know what you are doing -# and are sure that your network environment provides a secure connection -# to the key server, you can set this to `true` to override this -# behaviour. -# -# An example configuration might look like: -# -#trusted_key_servers: -# - server_name: "my_trusted_server.example.com" -# verify_keys: -# "ed25519:auto": "abcdefghijklmnopqrstuvwxyzabcdefghijklmopqr" -# - server_name: "my_other_trusted_server.example.com" -# -trusted_key_servers: - - server_name: "matrix.org" - -# Uncomment the following to disable the warning that is emitted when the -# trusted_key_servers include 'matrix.org'. See above. -# -suppress_key_server_warning: true - -# The signing keys to use when acting as a trusted key server. If not specified -# defaults to the server signing key. -# -# Can contain multiple keys, one per line. -# -#key_server_signing_keys_path: "key_server_signing_keys.key" - - -## Single sign-on integration ## - -# The following settings can be used to make Synapse use a single sign-on -# provider for authentication, instead of its internal password database. -# -# You will probably also want to set the following options to `false` to -# disable the regular login/registration flows: -# * enable_registration -# * password_config.enabled -# -# You will also want to investigate the settings under the "sso" configuration -# section below. - -# Enable SAML2 for registration and login. Uses pysaml2. -# -# At least one of `sp_config` or `config_path` must be set in this section to -# enable SAML login. -# -# Once SAML support is enabled, a metadata file will be exposed at -# https://:/_synapse/client/saml2/metadata.xml, which you may be able to -# use to configure your SAML IdP with. Alternatively, you can manually configure -# the IdP to use an ACS location of -# https://:/_synapse/client/saml2/authn_response. -# -saml2_config: - # `sp_config` is the configuration for the pysaml2 Service Provider. - # See pysaml2 docs for format of config. 
- # - # Default values will be used for the 'entityid' and 'service' settings, - # so it is not normally necessary to specify them unless you need to - # override them. - # - sp_config: - # Point this to the IdP's metadata. You must provide either a local - # file via the `local` attribute or (preferably) a URL via the - # `remote` attribute. - # - #metadata: - # local: ["saml2/idp.xml"] - # remote: - # - url: https://our_idp/metadata.xml - - # Allowed clock difference in seconds between the homeserver and IdP. - # - # Uncomment the below to increase the accepted time difference from 0 to 3 seconds. - # - #accepted_time_diff: 3 - - # By default, the user has to go to our login page first. If you'd like - # to allow IdP-initiated login, set 'allow_unsolicited: true' in a - # 'service.sp' section: - # - #service: - # sp: - # allow_unsolicited: true - - # The examples below are just used to generate our metadata xml, and you - # may well not need them, depending on your setup. Alternatively you - # may need a whole lot more detail - see the pysaml2 docs! - - #description: ["My awesome SP", "en"] - #name: ["Test SP", "en"] - - #ui_info: - # display_name: - # - lang: en - # text: "Display Name is the descriptive name of your service." - # description: - # - lang: en - # text: "Description should be a short paragraph explaining the purpose of the service." - # information_url: - # - lang: en - # text: "https://example.com/terms-of-service" - # privacy_statement_url: - # - lang: en - # text: "https://example.com/privacy-policy" - # keywords: - # - lang: en - # text: ["Matrix", "Element"] - # logo: - # - lang: en - # text: "https://example.com/logo.svg" - # width: "200" - # height: "80" - - #organization: - # name: Example com - # display_name: - # - ["Example co", "en"] - # url: "http://example.com" - - #contact_person: - # - given_name: Bob - # sur_name: "the Sysadmin" - # email_address": ["admin@example.com"] - # contact_type": technical - - # Instead of putting the config inline as above, you can specify a - # separate pysaml2 configuration file: - # - #config_path: "/data/sp_conf.py" - - # The lifetime of a SAML session. This defines how long a user has to - # complete the authentication process, if allow_unsolicited is unset. - # The default is 15 minutes. - # - #saml_session_lifetime: 5m - - # An external module can be provided here as a custom solution to - # mapping attributes returned from a saml provider onto a matrix user. - # - user_mapping_provider: - # The custom module's class. Uncomment to use a custom module. - # - #module: mapping_provider.SamlMappingProvider - - # Custom configuration values for the module. Below options are - # intended for the built-in provider, they should be changed if - # using a custom module. This section will be passed as a Python - # dictionary to the module's `parse_config` method. - # - config: - # The SAML attribute (after mapping via the attribute maps) to use - # to derive the Matrix ID from. 'uid' by default. - # - # Note: This used to be configured by the - # saml2_config.mxid_source_attribute option. If that is still - # defined, its value will be used instead. - # - #mxid_source_attribute: displayName - - # The mapping system to use for mapping the saml attribute onto a - # matrix ID. - # - # Options include: - # * 'hexencode' (which maps unpermitted characters to '=xx') - # * 'dotreplace' (which replaces unpermitted characters with - # '.'). - # The default is 'hexencode'. 
- # - # Note: This used to be configured by the - # saml2_config.mxid_mapping option. If that is still defined, its - # value will be used instead. - # - #mxid_mapping: dotreplace - - # In previous versions of synapse, the mapping from SAML attribute to - # MXID was always calculated dynamically rather than stored in a - # table. For backwards- compatibility, we will look for user_ids - # matching such a pattern before creating a new account. - # - # This setting controls the SAML attribute which will be used for this - # backwards-compatibility lookup. Typically it should be 'uid', but if - # the attribute maps are changed, it may be necessary to change it. - # - # The default is 'uid'. - # - #grandfathered_mxid_source_attribute: upn - - # It is possible to configure Synapse to only allow logins if SAML attributes - # match particular values. The requirements can be listed under - # `attribute_requirements` as shown below. All of the listed attributes must - # match for the login to be permitted. - # - #attribute_requirements: - # - attribute: userGroup - # value: "staff" - # - attribute: department - # value: "sales" - - # If the metadata XML contains multiple IdP entities then the `idp_entityid` - # option must be set to the entity to redirect users to. - # - # Most deployments only have a single IdP entity and so should omit this - # option. - # - #idp_entityid: 'https://our_idp/entityid' - - -# List of OpenID Connect (OIDC) / OAuth 2.0 identity providers, for registration -# and login. -# -# Options for each entry include: -# -# idp_id: a unique identifier for this identity provider. Used internally -# by Synapse; should be a single word such as 'github'. -# -# Note that, if this is changed, users authenticating via that provider -# will no longer be recognised as the same user! -# -# (Use "oidc" here if you are migrating from an old "oidc_config" -# configuration.) -# -# idp_name: A user-facing name for this identity provider, which is used to -# offer the user a choice of login mechanisms. -# -# idp_icon: An optional icon for this identity provider, which is presented -# by clients and Synapse's own IdP picker page. If given, must be an -# MXC URI of the format mxc:///. (An easy way to -# obtain such an MXC URI is to upload an image to an (unencrypted) room -# and then copy the "url" from the source of the event.) -# -# idp_brand: An optional brand for this identity provider, allowing clients -# to style the login flow according to the identity provider in question. -# See the spec for possible options here. -# -# discover: set to 'false' to disable the use of the OIDC discovery mechanism -# to discover endpoints. Defaults to true. -# -# issuer: Required. The OIDC issuer. Used to validate tokens and (if discovery -# is enabled) to discover the provider's endpoints. -# -# client_id: Required. oauth2 client id to use. -# -# client_secret: oauth2 client secret to use. May be omitted if -# client_secret_jwt_key is given, or if client_auth_method is 'none'. -# -# client_secret_jwt_key: Alternative to client_secret: details of a key used -# to create a JSON Web Token to be used as an OAuth2 client secret. If -# given, must be a dictionary with the following properties: -# -# key: a pem-encoded signing key. Must be a suitable key for the -# algorithm specified. Required unless 'key_file' is given. -# -# key_file: the path to file containing a pem-encoded signing key file. -# Required unless 'key' is given. -# -# jwt_header: a dictionary giving properties to include in the JWT -# header. 
Must include the key 'alg', giving the algorithm used to -# sign the JWT, such as "ES256", using the JWA identifiers in -# RFC7518. -# -# jwt_payload: an optional dictionary giving properties to include in -# the JWT payload. Normally this should include an 'iss' key. -# -# client_auth_method: auth method to use when exchanging the token. Valid -# values are 'client_secret_basic' (default), 'client_secret_post' and -# 'none'. -# -# scopes: list of scopes to request. This should normally include the "openid" -# scope. Defaults to ["openid"]. -# -# authorization_endpoint: the oauth2 authorization endpoint. Required if -# provider discovery is disabled. -# -# token_endpoint: the oauth2 token endpoint. Required if provider discovery is -# disabled. -# -# userinfo_endpoint: the OIDC userinfo endpoint. Required if discovery is -# disabled and the 'openid' scope is not requested. -# -# jwks_uri: URI where to fetch the JWKS. Required if discovery is disabled and -# the 'openid' scope is used. -# -# skip_verification: set to 'true' to skip metadata verification. Use this if -# you are connecting to a provider that is not OpenID Connect compliant. -# Defaults to false. Avoid this in production. -# -# user_profile_method: Whether to fetch the user profile from the userinfo -# endpoint. Valid values are: 'auto' or 'userinfo_endpoint'. -# -# Defaults to 'auto', which fetches the userinfo endpoint if 'openid' is -# included in 'scopes'. Set to 'userinfo_endpoint' to always fetch the -# userinfo endpoint. -# -# allow_existing_users: set to 'true' to allow a user logging in via OIDC to -# match a pre-existing account instead of failing. This could be used if -# switching from password logins to OIDC. Defaults to false. -# -# user_mapping_provider: Configuration for how attributes returned from a OIDC -# provider are mapped onto a matrix user. This setting has the following -# sub-properties: -# -# module: The class name of a custom mapping module. Default is -# 'synapse.handlers.oidc.JinjaOidcMappingProvider'. -# See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers -# for information on implementing a custom mapping provider. -# -# config: Configuration for the mapping provider module. This section will -# be passed as a Python dictionary to the user mapping provider -# module's `parse_config` method. -# -# For the default provider, the following settings are available: -# -# subject_claim: name of the claim containing a unique identifier -# for the user. Defaults to 'sub', which OpenID Connect -# compliant providers should provide. -# -# localpart_template: Jinja2 template for the localpart of the MXID. -# If this is not set, the user will be prompted to choose their -# own username (see 'sso_auth_account_details.html' in the 'sso' -# section of this file). -# -# display_name_template: Jinja2 template for the display name to set -# on first login. If unset, no displayname will be set. -# -# email_template: Jinja2 template for the email address of the user. -# If unset, no email address will be added to the account. -# -# extra_attributes: a map of Jinja2 templates for extra attributes -# to send back to the client during login. -# Note that these are non-standard and clients will ignore them -# without modifications. -# -# When rendering, the Jinja2 templates are given a 'user' variable, -# which is set to the claims returned by the UserInfo Endpoint and/or -# in the ID Token. 
-# -# It is possible to configure Synapse to only allow logins if certain attributes -# match particular values in the OIDC userinfo. The requirements can be listed under -# `attribute_requirements` as shown below. All of the listed attributes must -# match for the login to be permitted. Additional attributes can be added to -# userinfo by expanding the `scopes` section of the OIDC config to retrieve -# additional information from the OIDC provider. -# -# If the OIDC claim is a list, then the attribute must match any value in the list. -# Otherwise, it must exactly match the value of the claim. Using the example -# below, the `family_name` claim MUST be "Stephensson", but the `groups` -# claim MUST contain "admin". -# -# attribute_requirements: -# - attribute: family_name -# value: "Stephensson" -# - attribute: groups -# value: "admin" -# -# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md -# for information on how to configure these options. -# -# For backwards compatibility, it is also possible to configure a single OIDC -# provider via an 'oidc_config' setting. This is now deprecated and admins are -# advised to migrate to the 'oidc_providers' format. (When doing that migration, -# use 'oidc' for the idp_id to ensure that existing users continue to be -# recognised.) -# -oidc_providers: - # Generic example - # - #- idp_id: my_idp - # idp_name: "My OpenID provider" - # idp_icon: "mxc://example.com/mediaid" - # discover: false - # issuer: "https://accounts.example.com/" - # client_id: "provided-by-your-issuer" - # client_secret: "provided-by-your-issuer" - # client_auth_method: client_secret_post - # scopes: ["openid", "profile"] - # authorization_endpoint: "https://accounts.example.com/oauth2/auth" - # token_endpoint: "https://accounts.example.com/oauth2/token" - # userinfo_endpoint: "https://accounts.example.com/userinfo" - # jwks_uri: "https://accounts.example.com/.well-known/jwks.json" - # skip_verification: true - # user_mapping_provider: - # config: - # subject_claim: "id" - # localpart_template: "{{ user.login }}" - # display_name_template: "{{ user.name }}" - # email_template: "{{ user.email }}" - # attribute_requirements: - # - attribute: userGroup - # value: "synapseUsers" - - -# Enable Central Authentication Service (CAS) for registration and login. -# -cas_config: - # Uncomment the following to enable authorization against a CAS server. - # Defaults to false. - # - #enabled: true - - # The URL of the CAS authorization endpoint. - # - #server_url: "https://cas-server.com" - - # The attribute of the CAS response to use as the display name. - # - # If unset, no displayname will be set. - # - #displayname_attribute: name - - # It is possible to configure Synapse to only allow logins if CAS attributes - # match particular values. All of the keys in the mapping below must exist - # and the values must match the given value. Alternately if the given value - # is None then any value is allowed (the attribute just must exist). - # All of the listed attributes must match for the login to be permitted. - # - #required_attributes: - # userGroup: "staff" - # department: None - - -# Additional settings to use with single-sign on systems such as OpenID Connect, -# SAML2 and CAS. -# -sso: - # A list of client URLs which are whitelisted so that the user does not - # have to confirm giving access to their account to the URL. 
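A sketch of the CAS block described above, reusing the example server URL and attribute requirements from those comments; none of these values refer to a real CAS deployment.

```yaml
# Sketch only: CAS login restricted by attribute values, per the comments above.
cas_config:
  enabled: true
  server_url: "https://cas-server.com"
  displayname_attribute: name
  required_attributes:
    userGroup: "staff"
    department: None
```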
Any client - # whose URL starts with an entry in the following list will not be subject - # to an additional confirmation step after the SSO login is completed. - # - # WARNING: An entry such as "https://my.client" is insecure, because it - # will also match "https://my.client.evil.site", exposing your users to - # phishing attacks from evil.site. To avoid this, include a slash after the - # hostname: "https://my.client/". - # - # If public_baseurl is set, then the login fallback page (used by clients - # that don't natively support the required login flows) is whitelisted in - # addition to any URLs in this list. - # - # By default, this list is empty. - # - #client_whitelist: - # - https://riot.im/develop - # - https://my.custom.client/ - - # Directory in which Synapse will try to find the template files below. - # If not set, or the files named below are not found within the template - # directory, default templates from within the Synapse package will be used. - # - # Synapse will look for the following templates in this directory: - # - # * HTML page to prompt the user to choose an Identity Provider during - # login: 'sso_login_idp_picker.html'. - # - # This is only used if multiple SSO Identity Providers are configured. - # - # When rendering, this template is given the following variables: - # * redirect_url: the URL that the user will be redirected to after - # login. - # - # * server_name: the homeserver's name. - # - # * providers: a list of available Identity Providers. Each element is - # an object with the following attributes: - # - # * idp_id: unique identifier for the IdP - # * idp_name: user-facing name for the IdP - # * idp_icon: if specified in the IdP config, an MXC URI for an icon - # for the IdP - # * idp_brand: if specified in the IdP config, a textual identifier - # for the brand of the IdP - # - # The rendered HTML page should contain a form which submits its results - # back as a GET request, with the following query parameters: - # - # * redirectUrl: the client redirect URI (ie, the `redirect_url` passed - # to the template) - # - # * idp: the 'idp_id' of the chosen IDP. - # - # * HTML page to prompt new users to enter a userid and confirm other - # details: 'sso_auth_account_details.html'. This is only shown if the - # SSO implementation (with any user_mapping_provider) does not return - # a localpart. - # - # When rendering, this template is given the following variables: - # - # * server_name: the homeserver's name. - # - # * idp: details of the SSO Identity Provider that the user logged in - # with: an object with the following attributes: - # - # * idp_id: unique identifier for the IdP - # * idp_name: user-facing name for the IdP - # * idp_icon: if specified in the IdP config, an MXC URI for an icon - # for the IdP - # * idp_brand: if specified in the IdP config, a textual identifier - # for the brand of the IdP - # - # * user_attributes: an object containing details about the user that - # we received from the IdP. May have the following attributes: - # - # * display_name: the user's display_name - # * emails: a list of email addresses - # - # The template should render a form which submits the following fields: - # - # * username: the localpart of the user's chosen user id - # - # * HTML page allowing the user to consent to the server's terms and - # conditions. This is only shown for new users, and only if - # `user_consent.require_at_registration` is set. 
- # - # When rendering, this template is given the following variables: - # - # * server_name: the homeserver's name. - # - # * user_id: the user's matrix proposed ID. - # - # * user_profile.display_name: the user's proposed display name, if any. - # - # * consent_version: the version of the terms that the user will be - # shown - # - # * terms_url: a link to the page showing the terms. - # - # The template should render a form which submits the following fields: - # - # * accepted_version: the version of the terms accepted by the user - # (ie, 'consent_version' from the input variables). - # - # * HTML page for a confirmation step before redirecting back to the client - # with the login token: 'sso_redirect_confirm.html'. - # - # When rendering, this template is given the following variables: - # - # * redirect_url: the URL the user is about to be redirected to. - # - # * display_url: the same as `redirect_url`, but with the query - # parameters stripped. The intention is to have a - # human-readable URL to show to users, not to use it as - # the final address to redirect to. - # - # * server_name: the homeserver's name. - # - # * new_user: a boolean indicating whether this is the user's first time - # logging in. - # - # * user_id: the user's matrix ID. - # - # * user_profile.avatar_url: an MXC URI for the user's avatar, if any. - # None if the user has not set an avatar. - # - # * user_profile.display_name: the user's display name. None if the user - # has not set a display name. - # - # * HTML page which notifies the user that they are authenticating to confirm - # an operation on their account during the user interactive authentication - # process: 'sso_auth_confirm.html'. - # - # When rendering, this template is given the following variables: - # * redirect_url: the URL the user is about to be redirected to. - # - # * description: the operation which the user is being asked to confirm - # - # * idp: details of the Identity Provider that we will use to confirm - # the user's identity: an object with the following attributes: - # - # * idp_id: unique identifier for the IdP - # * idp_name: user-facing name for the IdP - # * idp_icon: if specified in the IdP config, an MXC URI for an icon - # for the IdP - # * idp_brand: if specified in the IdP config, a textual identifier - # for the brand of the IdP - # - # * HTML page shown after a successful user interactive authentication session: - # 'sso_auth_success.html'. - # - # Note that this page must include the JavaScript which notifies of a successful authentication - # (see https://matrix.org/docs/spec/client_server/r0.6.0#fallback). - # - # This template has no additional variables. - # - # * HTML page shown after a user-interactive authentication session which - # does not map correctly onto the expected user: 'sso_auth_bad_user.html'. - # - # When rendering, this template is given the following variables: - # * server_name: the homeserver's name. - # * user_id_to_verify: the MXID of the user that we are trying to - # validate. - # - # * HTML page shown during single sign-on if a deactivated user (according to Synapse's database) - # attempts to login: 'sso_account_deactivated.html'. - # - # This template has no additional variables. - # - # * HTML page to display to users if something goes wrong during the - # OpenID Connect authentication process: 'sso_error.html'. 
- # - # When rendering, this template is given two variables: - # * error: the technical name of the error - # * error_description: a human-readable message for the error - # - # You can see the default templates at: - # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates - # - #template_dir: "res/templates" - - -# JSON web token integration. The following settings can be used to make -# Synapse JSON web tokens for authentication, instead of its internal -# password database. -# -# Each JSON Web Token needs to contain a "sub" (subject) claim, which is -# used as the localpart of the mxid. -# -# Additionally, the expiration time ("exp"), not before time ("nbf"), -# and issued at ("iat") claims are validated if present. -# -# Note that this is a non-standard login type and client support is -# expected to be non-existent. -# -# See https://github.com/matrix-org/synapse/blob/master/docs/jwt.md. -# -#jwt_config: - # Uncomment the following to enable authorization using JSON web - # tokens. Defaults to false. - # - #enabled: true - - # This is either the private shared secret or the public key used to - # decode the contents of the JSON web token. - # - # Required if 'enabled' is true. - # - #secret: "provided-by-your-issuer" - - # The algorithm used to sign the JSON web token. - # - # Supported algorithms are listed at - # https://pyjwt.readthedocs.io/en/latest/algorithms.html - # - # Required if 'enabled' is true. - # - #algorithm: "provided-by-your-issuer" - - # The issuer to validate the "iss" claim against. - # - # Optional, if provided the "iss" claim will be required and - # validated for all JSON web tokens. - # - #issuer: "provided-by-your-issuer" - - # A list of audiences to validate the "aud" claim against. - # - # Optional, if provided the "aud" claim will be required and - # validated for all JSON web tokens. - # - # Note that if the "aud" claim is included in a JSON web token then - # validation will fail without configuring audiences. - # - #audiences: - # - "provided-by-your-issuer" - - -password_config: - # Uncomment to disable password login - # - #enabled: false - - # Uncomment to disable authentication against the local password - # database. This is ignored if `enabled` is false, and is only useful - # if you have other password_providers. - # - #localdb_enabled: false - - # Uncomment and change to a secret random string for extra security. - # DO NOT CHANGE THIS AFTER INITIAL SETUP! - # - #pepper: "EVEN_MORE_SECRET" - - # Define and enforce a password policy. Each parameter is optional. - # This is an implementation of MSC2000. - # - policy: - # Whether to enforce the password policy. - # Defaults to 'false'. - # - #enabled: true - - # Minimum accepted length for a password. - # Defaults to 0. - # - #minimum_length: 15 - - # Whether a password must contain at least one digit. - # Defaults to 'false'. - # - #require_digit: true - - # Whether a password must contain at least one symbol. - # A symbol is any character that's not a number or a letter. - # Defaults to 'false'. - # - #require_symbol: true - - # Whether a password must contain at least one lowercase letter. - # Defaults to 'false'. - # - #require_lowercase: true - - # Whether a password must contain at least one lowercase letter. - # Defaults to 'false'. - # - #require_uppercase: true - -ui_auth: - # The amount of time to allow a user-interactive authentication session - # to be active. 
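Stepping back to the `jwt_config` block above: under the stated assumptions (a homeserver reachable at https://matrix.example.com with `jwt_config` enabled and a shared HS256 secret), a login round-trip might look like the sketch below. The URL, secret and localpart are placeholders; the `/login` endpoint and the `org.matrix.login.jwt` login type are the ones described in Synapse's jwt.md.

```python
# Sketch of the JWT login flow from the jwt_config comments above.
# Placeholders: homeserver URL, shared secret and the "alice" localpart.
import time

import jwt       # PyJWT >= 2 (jwt.encode returns a str)
import requests

SECRET = "provided-by-your-issuer"
now = int(time.time())

# "sub" becomes the localpart of the mxid; "iat"/"exp" are validated if present.
token = jwt.encode(
    {"sub": "alice", "iat": now, "exp": now + 300},
    SECRET,
    algorithm="HS256",
)

resp = requests.post(
    "https://matrix.example.com/_matrix/client/r0/login",
    json={"type": "org.matrix.login.jwt", "token": token},
)
print(resp.json())  # on success: user_id, access_token, device_id, ...
```

The `ui_auth` comment that begins just above continues below.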
- # - # This defaults to 0, meaning the user is queried for their credentials - # before every action, but this can be overridden to allow a single - # validation to be re-used. This weakens the protections afforded by - # the user-interactive authentication process, by allowing for multiple - # (and potentially different) operations to use the same validation session. - # - # Uncomment below to allow for credential validation to last for 15 - # seconds. - # - #session_timeout: "15s" - - -# Configuration for sending emails from Synapse. -# -email: - # The hostname of the outgoing SMTP server to use. Defaults to 'localhost'. - # - #smtp_host: mail.server - - # The port on the mail server for outgoing SMTP. Defaults to 25. - # - #smtp_port: 587 - - # Username/password for authentication to the SMTP server. By default, no - # authentication is attempted. - # - #smtp_user: "exampleusername" - #smtp_pass: "examplepassword" - - # Uncomment the following to require TLS transport security for SMTP. - # By default, Synapse will connect over plain text, and will then switch to - # TLS via STARTTLS *if the SMTP server supports it*. If this option is set, - # Synapse will refuse to connect unless the server supports STARTTLS. - # - #require_transport_security: true - - # notif_from defines the "From" address to use when sending emails. - # It must be set if email sending is enabled. - # - # The placeholder '%(app)s' will be replaced by the application name, - # which is normally 'app_name' (below), but may be overridden by the - # Matrix client application. - # - # Note that the placeholder must be written '%(app)s', including the - # trailing 's'. - # - #notif_from: "Your Friendly %(app)s homeserver " - - # app_name defines the default value for '%(app)s' in notif_from and email - # subjects. It defaults to 'Matrix'. - # - #app_name: my_branded_matrix_server - - # Uncomment the following to enable sending emails for messages that the user - # has missed. Disabled by default. - # - #enable_notifs: true - - # Uncomment the following to disable automatic subscription to email - # notifications for new users. Enabled by default. - # - #notif_for_new_users: false - - # Custom URL for client links within the email notifications. By default - # links will be based on "https://matrix.to". - # - # (This setting used to be called riot_base_url; the old name is still - # supported for backwards-compatibility but is now deprecated.) - # - #client_base_url: "http://localhost/riot" - - # Configure the time that a validation email will expire after sending. - # Defaults to 1h. - # - #validation_token_lifetime: 15m - - # The web client location to direct users to during an invite. This is passed - # to the identity server as the org.matrix.web_client_location key. Defaults - # to unset, giving no guidance to the identity server. - # - #invite_client_location: https://app.element.io - - # Directory in which Synapse will try to find the template files below. - # If not set, or the files named below are not found within the template - # directory, default templates from within the Synapse package will be used. - # - # Synapse will look for the following templates in this directory: - # - # * The contents of email notifications of missed events: 'notif_mail.html' and - # 'notif_mail.txt'. - # - # * The contents of account expiry notice emails: 'notice_expiry.html' and - # 'notice_expiry.txt'. 
- # - # * The contents of password reset emails sent by the homeserver: - # 'password_reset.html' and 'password_reset.txt' - # - # * An HTML page that a user will see when they follow the link in the password - # reset email. The user will be asked to confirm the action before their - # password is reset: 'password_reset_confirmation.html' - # - # * HTML pages for success and failure that a user will see when they confirm - # the password reset flow using the page above: 'password_reset_success.html' - # and 'password_reset_failure.html' - # - # * The contents of address verification emails sent during registration: - # 'registration.html' and 'registration.txt' - # - # * HTML pages for success and failure that a user will see when they follow - # the link in an address verification email sent during registration: - # 'registration_success.html' and 'registration_failure.html' - # - # * The contents of address verification emails sent when an address is added - # to a Matrix account: 'add_threepid.html' and 'add_threepid.txt' - # - # * HTML pages for success and failure that a user will see when they follow - # the link in an address verification email sent when an address is added - # to a Matrix account: 'add_threepid_success.html' and - # 'add_threepid_failure.html' - # - # You can see the default templates at: - # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates - # - #template_dir: "res/templates" - - # Subjects to use when sending emails from Synapse. - # - # The placeholder '%(app)s' will be replaced with the value of the 'app_name' - # setting above, or by a value dictated by the Matrix client application. - # - # If a subject isn't overridden in this configuration file, the value used as - # its example will be used. - # - #subjects: - - # Subjects for notification emails. - # - # On top of the '%(app)s' placeholder, these can use the following - # placeholders: - # - # * '%(person)s', which will be replaced by the display name of the user(s) - # that sent the message(s), e.g. "Alice and Bob". - # * '%(room)s', which will be replaced by the name of the room the - # message(s) have been sent to, e.g. "My super room". - # - # See the example provided for each setting to see which placeholder can be - # used and how to use them. - # - # Subject to use to notify about one message from one or more user(s) in a - # room which has a name. - #message_from_person_in_room: "[%(app)s] You have a message on %(app)s from %(person)s in the %(room)s room..." - # - # Subject to use to notify about one message from one or more user(s) in a - # room which doesn't have a name. - #message_from_person: "[%(app)s] You have a message on %(app)s from %(person)s..." - # - # Subject to use to notify about multiple messages from one or more users in - # a room which doesn't have a name. - #messages_from_person: "[%(app)s] You have messages on %(app)s from %(person)s..." - # - # Subject to use to notify about multiple messages in a room which has a - # name. - #messages_in_room: "[%(app)s] You have messages on %(app)s in the %(room)s room..." - # - # Subject to use to notify about multiple messages in multiple rooms. - #messages_in_room_and_others: "[%(app)s] You have messages on %(app)s in the %(room)s room and others..." - # - # Subject to use to notify about multiple messages from multiple persons in - # multiple rooms. This is similar to the setting above except it's used when - # the room in which the notification was triggered has no name. 
- #messages_from_person_and_others: "[%(app)s] You have messages on %(app)s from %(person)s and others..." - # - # Subject to use to notify about an invite to a room which has a name. - #invite_from_person_to_room: "[%(app)s] %(person)s has invited you to join the %(room)s room on %(app)s..." - # - # Subject to use to notify about an invite to a room which doesn't have a - # name. - #invite_from_person: "[%(app)s] %(person)s has invited you to chat on %(app)s..." - - # Subject for emails related to account administration. - # - # On top of the '%(app)s' placeholder, these one can use the - # '%(server_name)s' placeholder, which will be replaced by the value of the - # 'server_name' setting in your Synapse configuration. - # - # Subject to use when sending a password reset email. - #password_reset: "[%(server_name)s] Password reset" - # - # Subject to use when sending a verification email to assert an address's - # ownership. - #email_validation: "[%(server_name)s] Validate your email" - - -# Password providers allow homeserver administrators to integrate -# their Synapse installation with existing authentication methods -# ex. LDAP, external tokens, etc. -# -# For more information and known implementations, please see -# https://github.com/matrix-org/synapse/blob/master/docs/password_auth_providers.md -# -# Note: instances wishing to use SAML or CAS authentication should -# instead use the `saml2_config` or `cas_config` options, -# respectively. -# -password_providers: -# # Example config for an LDAP auth provider -# - module: "ldap_auth_provider.LdapAuthProvider" -# config: -# enabled: true -# uri: "ldap://ldap.example.com:389" -# start_tls: true -# base: "ou=users,dc=example,dc=com" -# attributes: -# uid: "cn" -# mail: "email" -# name: "givenName" -# #bind_dn: -# #bind_password: -# #filter: "(objectClass=posixAccount)" - - - -## Push ## - -push: - # Clients requesting push notifications can either have the body of - # the message sent in the notification poke along with other details - # like the sender, or just the event ID and room ID (`event_id_only`). - # If clients choose the former, this option controls whether the - # notification request includes the content of the event (other details - # like the sender are still included). For `event_id_only` push, it - # has no effect. - # - # For modern android devices the notification content will still appear - # because it is loaded by the app. iPhone, however will send a - # notification saying only that a message arrived and who it came from. - # - # The default value is "true" to include message details. Uncomment to only - # include the event ID and room ID in push notification payloads. - # - #include_content: false - - # When a push notification is received, an unread count is also sent. - # This number can either be calculated as the number of unread messages - # for the user, or the number of *rooms* the user has unread messages in. - # - # The default value is "true", meaning push clients will see the number of - # rooms with unread messages in them. Uncomment to instead send the number - # of unread messages. - # - #group_unread_count_by_room: false - - -# Spam checkers are third-party modules that can block specific actions -# of local users, such as creating rooms and registering undesirable -# usernames, as well as remote users by redacting incoming events. 
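As a rough illustration of the spam checker hook described above, a module along the following lines could be referenced from the `spam_checker` list below. The class name and option key are hypothetical, and the exact callback signatures vary between Synapse versions, so treat this as a sketch and check docs/spam_checker.md for the version you run.

```python
# Hypothetical spam checker sketch. The callback names follow the legacy
# Synapse spam-checker interface; verify the exact signatures for your version.


class SuperSpamChecker:
    def __init__(self, config):
        # 'config' is the mapping given under the module's "config:" key.
        self.blocked_senders = config.get("example_stop_events_from", [])

    def check_event_for_spam(self, event):
        # Return True to reject the event as spam, False to accept it.
        return getattr(event, "sender", None) in self.blocked_senders

    def user_may_create_room(self, userid):
        # Return False to refuse room creation for this local user.
        return True
```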
-# -spam_checker: - #- module: "my_custom_project.SuperSpamChecker" - # config: - # example_option: 'things' - #- module: "some_other_project.BadEventStopper" - # config: - # example_stop_events_from: ['@bad:example.com'] - - -## Rooms ## - -# Controls whether locally-created rooms should be end-to-end encrypted by -# default. -# -# Possible options are "all", "invite", and "off". They are defined as: -# -# * "all": any locally-created room -# * "invite": any room created with the "private_chat" or "trusted_private_chat" -# room creation presets -# * "off": this option will take no effect -# -# The default value is "off". -# -# Note that this option will only affect rooms created after it is set. It -# will also not affect rooms created by other servers. -# -#encryption_enabled_by_default_for_room_type: invite - - -# Uncomment to allow non-server-admin users to create groups on this server -# -#enable_group_creation: true - -# If enabled, non server admins can only create groups with local parts -# starting with this prefix -# -#group_creation_prefix: "unofficial_" - - - -# User Directory configuration -# -user_directory: - # Defines whether users can search the user directory. If false then - # empty responses are returned to all queries. Defaults to true. - # - # Uncomment to disable the user directory. - # - #enabled: false - - # Defines whether to search all users visible to your HS when searching - # the user directory, rather than limiting to users visible in public - # rooms. Defaults to false. - # - # If you set it true, you'll have to rebuild the user_directory search - # indexes, see: - # https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md - # - # Uncomment to return search results containing all known users, even if that - # user does not share a room with the requester. - # - #search_all_users: true - - # Defines whether to prefer local users in search query results. - # If True, local users are more likely to appear above remote users - # when searching the user directory. Defaults to false. - # - # Uncomment to prefer local over remote users in user directory search - # results. - # - #prefer_local_users: true - - -# User Consent configuration -# -# for detailed instructions, see -# https://github.com/matrix-org/synapse/blob/master/docs/consent_tracking.md -# -# Parts of this section are required if enabling the 'consent' resource under -# 'listeners', in particular 'template_dir' and 'version'. -# -# 'template_dir' gives the location of the templates for the HTML forms. -# This directory should contain one subdirectory per language (eg, 'en', 'fr'), -# and each language directory should contain the policy document (named as -# '.html') and a success page (success.html). -# -# 'version' specifies the 'current' version of the policy document. It defines -# the version to be served by the consent resource if there is no 'v' -# parameter. -# -# 'server_notice_content', if enabled, will send a user a "Server Notice" -# asking them to consent to the privacy policy. The 'server_notices' section -# must also be configured for this to work. Notices will *not* be sent to -# guest users unless 'send_server_notice_to_guests' is set to true. -# -# 'block_events_error', if set, will block any attempts to send events -# until the user consents to the privacy policy. The value of the setting is -# used as the text of the error. -# -# 'require_at_registration', if enabled, will add a step to the registration -# process, similar to how captcha works. 
Users will be required to accept the -# policy before their account is created. -# -# 'policy_name' is the display name of the policy users will see when registering -# for an account. Has no effect unless `require_at_registration` is enabled. -# Defaults to "Privacy Policy". -# -#user_consent: -# template_dir: res/templates/privacy -# version: 1.0 -# server_notice_content: -# msgtype: m.text -# body: >- -# To continue using this homeserver you must review and agree to the -# terms and conditions at %(consent_uri)s -# send_server_notice_to_guests: true -# block_events_error: >- -# To continue using this homeserver you must review and agree to the -# terms and conditions at %(consent_uri)s -# require_at_registration: false -# policy_name: Privacy Policy -# - - - -# Settings for local room and user statistics collection. See -# docs/room_and_user_statistics.md. -# -stats: - # Uncomment the following to disable room and user statistics. Note that doing - # so may cause certain features (such as the room directory) not to work - # correctly. - # - #enabled: false - - # The size of each timeslice in the room_stats_historical and - # user_stats_historical tables, as a time period. Defaults to "1d". - # - #bucket_size: 1h - - -# Server Notices room configuration -# -# Uncomment this section to enable a room which can be used to send notices -# from the server to users. It is a special room which cannot be left; notices -# come from a special "notices" user id. -# -# If you uncomment this section, you *must* define the system_mxid_localpart -# setting, which defines the id of the user which will be used to send the -# notices. -# -# It's also possible to override the room name, the display name of the -# "notices" user, and the avatar for the user. -# -#server_notices: -# system_mxid_localpart: notices -# system_mxid_display_name: "Server Notices" -# system_mxid_avatar_url: "mxc://server.com/oumMVlgDnLYFaPVkExemNVVZ" -# room_name: "Server Notices" - - - -# Uncomment to disable searching the public room list. When disabled -# blocks searching local and remote room lists for local and remote -# users by always returning an empty list for all queries. -# -#enable_room_list_search: false - -# The `alias_creation` option controls who's allowed to create aliases -# on this server. -# -# The format of this option is a list of rules that contain globs that -# match against user_id, room_id and the new alias (fully qualified with -# server name). The action in the first rule that matches is taken, -# which can currently either be "allow" or "deny". -# -# Missing user_id/room_id/alias fields default to "*". -# -# If no rules match the request is denied. An empty list means no one -# can create aliases. -# -# Options for the rules include: -# -# user_id: Matches against the creator of the alias -# alias: Matches against the alias being created -# room_id: Matches against the room ID the alias is being pointed at -# action: Whether to "allow" or "deny" the request if the rule matches -# -# The default is: -# -#alias_creation_rules: -# - user_id: "*" -# alias: "*" -# room_id: "*" -# action: allow - -# The `room_list_publication_rules` option controls who can publish and -# which rooms can be published in the public room list. -# -# The format of this option is the same as that for -# `alias_creation_rules`. -# -# If the room has one or more aliases associated with it, only one of -# the aliases needs to match the alias rule. If there are no aliases -# then only rules with `alias: *` match. 
-# -# If no rules match the request is denied. An empty list means no one -# can publish rooms. -# -# Options for the rules include: -# -# user_id: Matches against the creator of the alias -# room_id: Matches against the room ID being published -# alias: Matches against any current local or canonical aliases -# associated with the room -# action: Whether to "allow" or "deny" the request if the rule matches -# -# The default is: -# -#room_list_publication_rules: -# - user_id: "*" -# alias: "*" -# room_id: "*" -# action: allow - - -# Server admins can define a Python module that implements extra rules for -# allowing or denying incoming events. In order to work, this module needs to -# override the methods defined in synapse/events/third_party_rules.py. -# -# This feature is designed to be used in closed federations only, where each -# participating server enforces the same rules. -# -#third_party_event_rules: -# module: "my_custom_project.SuperRulesSet" -# config: -# example_option: 'things' - - -## Opentracing ## - -# These settings enable opentracing, which implements distributed tracing. -# This allows you to observe the causal chains of events across servers -# including requests, key lookups etc., across any server running -# synapse or any other other services which supports opentracing -# (specifically those implemented with Jaeger). -# -opentracing: - # tracing is disabled by default. Uncomment the following line to enable it. - # - #enabled: true - - # The list of homeservers we wish to send and receive span contexts and span baggage. - # See docs/opentracing.rst. - # - # This is a list of regexes which are matched against the server_name of the - # homeserver. - # - # By default, it is empty, so no servers are matched. - # - #homeserver_whitelist: - # - ".*" - - # A list of the matrix IDs of users whose requests will always be traced, - # even if the tracing system would otherwise drop the traces due to - # probabilistic sampling. - # - # By default, the list is empty. - # - #force_tracing_for_users: - # - "@user1:server_name" - # - "@user2:server_name" - - # Jaeger can be configured to sample traces at different rates. - # All configuration options provided by Jaeger can be set here. - # Jaeger's configuration is mostly related to trace sampling which - # is documented here: - # https://www.jaegertracing.io/docs/latest/sampling/. - # - #jaeger_config: - # sampler: - # type: const - # param: 1 - # logging: - # false - - -## Workers ## - -# Disables sending of outbound federation transactions on the main process. -# Uncomment if using a federation sender worker. -# -#send_federation: false - -# It is possible to run multiple federation sender workers, in which case the -# work is balanced across them. -# -# This configuration must be shared between all federation sender workers, and if -# changed all federation sender workers must be stopped at the same time and then -# started, to ensure that all instances are running with the same config (otherwise -# events may be dropped). -# -#federation_sender_instances: -# - federation_sender1 - -# When using workers this should be a map from `worker_name` to the -# HTTP replication listener of the worker, if configured. -# -#instance_map: -# worker1: -# host: localhost -# port: 8034 - -# Experimental: When using workers you can define which workers should -# handle event persistence and typing notifications. Any worker -# specified here must also be in the `instance_map`. 
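Returning to the `alias_creation_rules` / `room_list_publication_rules` semantics spelled out a little earlier (first matching rule wins, omitted fields default to "*", no match means deny): the sketch below is a toy re-implementation of that matching logic for illustration only, not Synapse's code, and it uses fnmatch-style globbing as an approximation.

```python
# Toy re-implementation of the rule matching described above: first matching
# rule wins, missing fields default to "*", and no match means "deny".
from fnmatch import fnmatchcase


def evaluate(rules, user_id, room_id, alias):
    request = {"user_id": user_id, "room_id": room_id, "alias": alias}
    for rule in rules:
        if all(
            fnmatchcase(request[field], rule.get(field, "*"))
            for field in ("user_id", "room_id", "alias")
        ):
            return rule.get("action", "deny")
    return "deny"  # no rule matched; an empty rule list denies everything


rules = [
    {"user_id": "@admin:example.com", "action": "allow"},
    {"alias": "#official-*", "action": "deny"},
    {"user_id": "*", "room_id": "*", "alias": "*", "action": "allow"},
]

print(evaluate(rules, "@alice:example.com", "!abc:example.com", "#official-news"))  # deny
print(evaluate(rules, "@alice:example.com", "!abc:example.com", "#fun"))            # allow
```

The workers-related comments that begin just above continue below.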
-# -#stream_writers: -# events: worker1 -# typing: worker1 - -# The worker that is used to run background tasks (e.g. cleaning up expired -# data). If not provided this defaults to the main process. -# -#run_background_tasks_on: worker1 - -# A shared secret used by the replication APIs to authenticate HTTP requests -# from workers. -# -# By default this is unused and traffic is not authenticated. -# -#worker_replication_secret: "" - - -# Configuration for Redis when using workers. This *must* be enabled when -# using workers (unless using old style direct TCP configuration). -# -redis: - # Uncomment the below to enable Redis support. - # - #enabled: true - - # Optional host and port to use to connect to redis. Defaults to - # localhost and 6379 - # - #host: localhost - #port: 6379 - - # Optional password if configured on the Redis instance - # - #password: - - -# Enable experimental features in Synapse. -# -# Experimental features might break or be removed without a deprecation -# period. -# -experimental_features: - # Support for Spaces (MSC1772), it enables the following: - # - # * The Spaces Summary API (MSC2946). - # * Restricting room membership based on space membership (MSC3083). - # - # Uncomment to disable support for Spaces. - #spaces_enabled: false - - -# vim:ft=yaml From c5b7ea19ce9f68c2728d70cbd3e468f5d5e8e713 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 11:13:56 +0200 Subject: [PATCH 023/184] CI: add lints --- .github/workflows/lint.yml | 8 ++++++++ .github/workflows/test.yml | 4 ++-- .pre-commit-config.yaml | 7 ++++--- 3 files changed, 14 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/lint.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..5d7dfbf --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,8 @@ +name: Lint +on: [push, pull_request] +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: psf/black@stable diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 31504e0..1e1a427 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,5 +1,5 @@ -name: build docker image and run tests inside -on: push +name: Tests +on: [push, pull_request] jobs: tests: runs-on: ubuntu-latest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cab3bdf..7c917da 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,10 +14,11 @@ repos: - id: flake8 - id: mixed-line-ending - id: trailing-whitespace -- repo: https://github.com/pre-commit/mirrors-yapf - rev: v0.29.0 +- repo: https://github.com/psf/black + rev: stable hooks: - - id: yapf + - id: black + language_version: python3 - repo: https://github.com/PyCQA/pydocstyle rev: 5.0.1 hooks: From 32208294ea22fd7ad6d06cfba952227904dc97d3 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 11:15:04 +0200 Subject: [PATCH 024/184] update pre-commit --- .pre-commit-config.yaml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7c917da..6fc3f8b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.4.0 + rev: v4.0.1 hooks: - id: check-added-large-files - id: check-ast @@ -11,18 +11,21 @@ repos: - id: debug-statements - id: detect-private-key - id: end-of-file-fixer - - id: flake8 - id: mixed-line-ending - id: trailing-whitespace - repo: 
https://github.com/psf/black - rev: stable + rev: 21.6b0 hooks: - id: black language_version: python3 - repo: https://github.com/PyCQA/pydocstyle - rev: 5.0.1 + rev: 6.1.1 hooks: - id: pydocstyle +- repo: https://github.com/PyCQA/flake8 + rev: 3.9.2 + hooks: + - id: flake8 - repo: https://gitlab.com/smop/pre-commit-hooks rev: v1.0.0 hooks: From 91b23cd166a95a8216ac665fbd8463bd496c5990 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 11:28:05 +0200 Subject: [PATCH 025/184] poetry --- poetry.lock | 934 +++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 25 ++ setup.cfg | 2 - 3 files changed, 959 insertions(+), 2 deletions(-) create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 setup.cfg diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..fcf852f --- /dev/null +++ b/poetry.lock @@ -0,0 +1,934 @@ +[[package]] +name = "aiofiles" +version = "0.6.0" +description = "File support for asyncio." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "aiohttp" +version = "3.7.4.post0" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-timeout = ">=3.0,<4.0" +attrs = ">=17.3.0" +chardet = ">=2.0,<5.0" +multidict = ">=4.5,<7.0" +typing-extensions = ">=3.6.5" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotlipy", "cchardet"] + +[[package]] +name = "aiohttp-socks" +version = "0.6.0" +description = "Proxy connector for aiohttp" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aiohttp = ">=2.3.2" +attrs = ">=19.2.0" +python-socks = {version = ">=1.2.2", extras = ["asyncio"]} + +[[package]] +name = "anyio" +version = "3.2.1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "async-timeout" +version = "3.0.1" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.5.3" + +[[package]] +name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +name = "black" +version = "21.6b0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.8.1,<1" +regex = ">=2020.1.8" +toml = ">=0.10.1" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] +python2 = ["typed-ast (>=1.4.2)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2021.5.30" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "click" +version = "8.0.1" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[[package]] +name = "future" +version = "0.18.2" +description = "Clean single-source support for Python 3 and 2" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "h2" +version = "4.0.0" +description = "HTTP/2 State-Machine based protocol implementation" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "httpcore" +version = "0.13.6" +description = "A minimal low-level HTTP client." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +anyio = ">=3.0.0,<4.0.0" +h11 = ">=0.11,<0.13" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] + +[[package]] +name = "httpx" +version = "0.18.2" +description = "The next generation HTTP client." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +certifi = "*" +httpcore = ">=0.13.3,<0.14.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotlicffi (>=1.0.0,<2.0.0)"] +http2 = ["h2 (>=3.0.0,<4.0.0)"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "idna" +version = "3.2" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] + +[[package]] +name = "logbook" +version = "1.5.3" +description = "A logging replacement for Python" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +all = ["redis", "brotli", "pytest (>4.0)", "execnet (>=1.0.9)", "cython", "pyzmq", "pytest-cov (>=2.6)", "sqlalchemy", "jinja2"] +compression = ["brotli"] +dev = ["pytest-cov (>=2.6)", "pytest (>4.0)", "cython"] +execnet = ["execnet (>=1.0.9)"] +jinja = ["jinja2"] +redis = ["redis"] +sqlalchemy = ["sqlalchemy"] +test = ["pytest-cov (>=2.6)", "pytest (>4.0)"] +zmq = ["pyzmq"] + +[[package]] +name = "markdown" +version = "3.3.4" +description = "Python implementation of Markdown." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "matrix-nio" +version = "0.18.3" +description = "A Python Matrix client library, designed according to sans I/O principles." +category = "main" +optional = false +python-versions = ">=3.6.1,<4.0.0" + +[package.dependencies] +aiofiles = ">=0.6.0,<0.7.0" +aiohttp = ">=3.7.4,<4.0.0" +aiohttp-socks = ">=0.6.0,<0.7.0" +future = ">=0.18.2,<0.19.0" +h11 = ">=0.12.0,<0.13.0" +h2 = ">=4.0.0,<5.0.0" +jsonschema = ">=3.2.0,<4.0.0" +logbook = ">=1.5.3,<2.0.0" +pycryptodome = ">=3.10.1,<4.0.0" +unpaddedbase64 = ">=2.1.0,<3.0.0" + +[package.extras] +e2e = ["python-olm (>=3.1.3,<4.0.0)", "peewee (>=3.14.4,<4.0.0)", "cachetools (>=4.2.1,<5.0.0)", "atomicwrites (>=1.4.0,<2.0.0)"] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "multidict" +version = "5.1.0" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.7.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycryptodome" +version = "3.10.1" +description = "Cryptographic library for Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyflakes" +version = "2.3.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyrsistent" +version = "0.18.0" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "python-socks" +version = "1.2.4" +description = "Core proxy (SOCKS4, SOCKS5, HTTP tunneling) functionality for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +async-timeout = {version = ">=3.0.1", optional = true, markers = "extra == \"asyncio\""} + +[package.extras] +asyncio = ["async-timeout (>=3.0.1)"] +curio = ["curio (>=1.4)"] +trio = ["trio (>=0.16.0)"] + +[[package]] +name = "regex" +version = "2021.7.6" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "snowballstemmer" +version = "2.1.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "typing-extensions" +version = "3.10.0.0" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "unpaddedbase64" +version = "2.1.0" +description = "Encode and decode Base64 without \"=\" padding" +category = "main" +optional = false +python-versions = ">=3.6,<4.0" + +[[package]] +name = "yarl" +version = "1.6.3" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "1.1" +python-versions = "^3.9" +content-hash = "874383a5a8800e8de069e1a513b39e84a8afff9f192cf9dc69f72512f2f4cf31" + +[metadata.files] +aiofiles = [ + {file = "aiofiles-0.6.0-py3-none-any.whl", hash = "sha256:bd3019af67f83b739f8e4053c6c0512a7f545b9a8d91aaeab55e6e0f9d123c27"}, + {file = "aiofiles-0.6.0.tar.gz", hash = "sha256:e0281b157d3d5d59d803e3f4557dcc9a3dff28a4dd4829a9ff478adae50ca092"}, +] +aiohttp = [ + {file = "aiohttp-3.7.4.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win32.whl", hash = "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c"}, 
+ {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win32.whl", hash = "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win32.whl", hash = "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win_amd64.whl", hash = "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win32.whl", hash = "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win_amd64.whl", hash = "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe"}, + {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, +] +aiohttp-socks = [ + {file = "aiohttp_socks-0.6.0-py3-none-any.whl", hash = "sha256:db7aa48c0758ee45d7dbc1fde499912ec6fb77eab77a6e2808825d1f41d4e300"}, + {file = "aiohttp_socks-0.6.0.tar.gz", hash = "sha256:b95dcfba3740c8499f4ff633846d72108459d25650ebc1ae8b299cb817088013"}, +] +anyio = [ + {file = "anyio-3.2.1-py3-none-any.whl", hash = 
"sha256:442678a3c7e1cdcdbc37dcfe4527aa851b1b0c9162653b516e9f509821691d50"}, + {file = "anyio-3.2.1.tar.gz", hash = "sha256:07968db9fa7c1ca5435a133dc62f988d84ef78e1d9b22814a59d1c62618afbc5"}, +] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +async-timeout = [ + {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, + {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, +] +attrs = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +black = [ + {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, + {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, +] +certifi = [ + {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, + {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +click = [ + {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, + {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = 
"sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +flake8 = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +h2 = [ + {file = "h2-4.0.0-py3-none-any.whl", hash = "sha256:ac9e293a1990b339d5d71b19c5fe630e3dd4d768c620d1730d355485323f1b25"}, + {file = "h2-4.0.0.tar.gz", hash = "sha256:bb7ac7099dd67a857ed52c815a6192b6b1f5ba6b516237fc24a085341340593d"}, +] +hpack = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file 
= "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] +httpcore = [ + {file = "httpcore-0.13.6-py3-none-any.whl", hash = "sha256:db4c0dcb8323494d01b8c6d812d80091a31e520033e7b0120883d6f52da649ff"}, + {file = "httpcore-0.13.6.tar.gz", hash = "sha256:b0d16f0012ec88d8cc848f5a55f8a03158405f4bca02ee49bc4ca2c1fda49f3e"}, +] +httpx = [ + {file = "httpx-0.18.2-py3-none-any.whl", hash = "sha256:979afafecb7d22a1d10340bafb403cf2cb75aff214426ff206521fc79d26408c"}, + {file = "httpx-0.18.2.tar.gz", hash = "sha256:9f99c15d33642d38bce8405df088c1c4cfd940284b4290cacbfb02e64f4877c6"}, +] +hyperframe = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] +idna = [ + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, +] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +logbook = [ + {file = "Logbook-1.5.3-cp27-cp27m-win32.whl", hash = "sha256:56ee54c11df3377314cedcd6507638f015b4b88c0238c2e01b5eb44fd3a6ad1b"}, + {file = "Logbook-1.5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:2dc85f1510533fddb481e97677bb7bca913560862734c0b3b289bfed04f78c92"}, + {file = "Logbook-1.5.3-cp35-cp35m-win32.whl", hash = "sha256:94e2e11ff3c2304b0d09a36c6208e5ae756eb948b210e5cbd63cd8d27f911542"}, + {file = "Logbook-1.5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:97fee1bd9605f76335b169430ed65e15e457a844b2121bd1d90a08cf7e30aba0"}, + {file = "Logbook-1.5.3-cp36-cp36m-win32.whl", hash = "sha256:7c533eb728b3d220b1b5414ba4635292d149d79f74f6973b4aa744c850ca944a"}, + {file = "Logbook-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:e18f7422214b1cf0240c56f884fd9c9b4ff9d0da2eabca9abccba56df7222f66"}, + {file = "Logbook-1.5.3-cp37-cp37m-win32.whl", hash = "sha256:8f76a2e7b1f72595f753228732f81ce342caf03babc3fed6bbdcf366f2f20f18"}, + {file = "Logbook-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0cf2cdbfb65a03b5987d19109dacad13417809dcf697f66e1a7084fb21744ea9"}, + {file = "Logbook-1.5.3.tar.gz", hash = "sha256:66f454ada0f56eae43066f604a222b09893f98c1adc18df169710761b8f32fe8"}, +] +markdown = [ + {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, + {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, +] +matrix-nio = [ + {file = "matrix-nio-0.18.3.tar.gz", hash = "sha256:7f2e92f5b219367e47824bfe8bd2b1a06ce83ae28956f112dd3c2112a4d27085"}, + {file = "matrix_nio-0.18.3-py3-none-any.whl", hash = "sha256:a28653f96760b045c7edc53b645872cf2facc1639dc8cf56d748cd5e54ed2d3d"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +multidict = [ + {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = 
"sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, + {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, + {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, + {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, + {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, + {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, + {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, + 
{file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, + {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, + {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, + {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, + {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, + {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pycryptodome = [ + {file = "pycryptodome-3.10.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1c5e1ca507de2ad93474be5cfe2bfa76b7cf039a1a32fc196f40935944871a06"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:6260e24d41149268122dd39d4ebd5941e9d107f49463f7e071fd397e29923b0c"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3f840c49d38986f6e17dbc0673d37947c88bc9d2d9dba1c01b979b36f8447db1"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:2dea65df54349cdfa43d6b2e8edb83f5f8d6861e5cf7b1fbc3e34c5694c85e27"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e61e363d9a5d7916f3a4ce984a929514c0df3daf3b1b2eb5e6edbb131ee771cf"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:2603c98ae04aac675fefcf71a6c87dc4bb74a75e9071ae3923bbc91a59f08d35"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-win32.whl", hash = 
"sha256:38661348ecb71476037f1e1f553159b80d256c00f6c0b00502acac891f7116d9"}, + {file = "pycryptodome-3.10.1-cp27-cp27m-win_amd64.whl", hash = "sha256:1723ebee5561628ce96748501cdaa7afaa67329d753933296321f0be55358dce"}, + {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:77997519d8eb8a4adcd9a47b9cec18f9b323e296986528186c0e9a7a15d6a07e"}, + {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:99b2f3fc51d308286071d0953f92055504a6ffe829a832a9fc7a04318a7683dd"}, + {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e0a4d5933a88a2c98bbe19c0c722f5483dc628d7a38338ac2cb64a7dbd34064b"}, + {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d3d6958d53ad307df5e8469cc44474a75393a434addf20ecd451f38a72fe29b8"}, + {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:a8eb8b6ea09ec1c2535bf39914377bc8abcab2c7d30fa9225eb4fe412024e427"}, + {file = "pycryptodome-3.10.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:31c1df17b3dc5f39600a4057d7db53ac372f492c955b9b75dd439f5d8b460129"}, + {file = "pycryptodome-3.10.1-cp35-abi3-manylinux1_i686.whl", hash = "sha256:a3105a0eb63eacf98c2ecb0eb4aa03f77f40fbac2bdde22020bb8a536b226bb8"}, + {file = "pycryptodome-3.10.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:a92d5c414e8ee1249e850789052608f582416e82422502dc0ac8c577808a9067"}, + {file = "pycryptodome-3.10.1-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:60386d1d4cfaad299803b45a5bc2089696eaf6cdd56f9fc17479a6f89595cfc8"}, + {file = "pycryptodome-3.10.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:501ab36aae360e31d0ec370cf5ce8ace6cb4112060d099b993bc02b36ac83fb6"}, + {file = "pycryptodome-3.10.1-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:fc7489a50323a0df02378bc2fff86eb69d94cc5639914346c736be981c6a02e7"}, + {file = "pycryptodome-3.10.1-cp35-abi3-win32.whl", hash = "sha256:9b6f711b25e01931f1c61ce0115245a23cdc8b80bf8539ac0363bdcf27d649b6"}, + {file = "pycryptodome-3.10.1-cp35-abi3-win_amd64.whl", hash = "sha256:7fd519b89585abf57bf47d90166903ec7b43af4fe23c92273ea09e6336af5c07"}, + {file = "pycryptodome-3.10.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:09c1555a3fa450e7eaca41ea11cd00afe7c91fef52353488e65663777d8524e0"}, + {file = "pycryptodome-3.10.1-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:758949ca62690b1540dfb24ad773c6da9cd0e425189e83e39c038bbd52b8e438"}, + {file = "pycryptodome-3.10.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:e3bf558c6aeb49afa9f0c06cee7fb5947ee5a1ff3bd794b653d39926b49077fa"}, + {file = "pycryptodome-3.10.1-pp27-pypy_73-win32.whl", hash = "sha256:f977cdf725b20f6b8229b0c87acb98c7717e742ef9f46b113985303ae12a99da"}, + {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d2df5223b12437e644ce0a3be7809471ffa71de44ccd28b02180401982594a6"}, + {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:98213ac2b18dc1969a47bc65a79a8fca02a414249d0c8635abb081c7f38c91b6"}, + {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:12222a5edc9ca4a29de15fbd5339099c4c26c56e13c2ceddf0b920794f26165d"}, + {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:6bbf7fee7b7948b29d7e71fcacf48bac0c57fb41332007061a933f2d996f9713"}, + {file = "pycryptodome-3.10.1.tar.gz", hash = "sha256:3e2e3a06580c5f190df843cdb90ea28d61099cf4924334d5297a995de68e4673"}, +] +pydocstyle = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = 
"sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = "sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, + {file = "pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, + {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, +] +python-socks = [ + {file = "python-socks-1.2.4.tar.gz", hash = 
"sha256:7d0ef2578cead9f762b71317d25a6c118fabaf79535555e75b3e102f5158ddd8"}, + {file = "python_socks-1.2.4-py3-none-any.whl", hash = "sha256:9f12e8fe78629b87543fad0e4ea0ccf103a4fad6a7872c5d0ecb36d9903fa548"}, +] +regex = [ + {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, + {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, + {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, + {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, + {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, + {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, + {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, + {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, + {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, + {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, + {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, + {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, + {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = 
"sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, + {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +typing-extensions = [ + {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, + {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, + {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, +] +unpaddedbase64 = [ + {file = "unpaddedbase64-2.1.0-py3-none-any.whl", hash = "sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6"}, + {file = "unpaddedbase64-2.1.0.tar.gz", hash = "sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005"}, +] +yarl = [ + {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366"}, + {file = "yarl-1.6.3-cp36-cp36m-win32.whl", hash = "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721"}, + {file = "yarl-1.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643"}, + {file = "yarl-1.6.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970"}, + {file = "yarl-1.6.3-cp37-cp37m-win32.whl", hash = 
"sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e"}, + {file = "yarl-1.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50"}, + {file = "yarl-1.6.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2"}, + {file = "yarl-1.6.3-cp38-cp38-win32.whl", hash = "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896"}, + {file = "yarl-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a"}, + {file = "yarl-1.6.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4"}, + {file = "yarl-1.6.3-cp39-cp39-win32.whl", hash = "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424"}, + {file = "yarl-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6"}, + {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..8c2ae72 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,25 @@ +[tool.poetry] +name = "matrix-webhook" +version = "2.1.0" +description = "Post a message to a matrix room with a simple HTTP POST" +authors = ["Guilhem Saurel "] +license = "BSD-2-Clause" + +[tool.poetry.dependencies] +python = "^3.9" +Markdown = "^3.3.4" +matrix-nio = "^0.18.3" + +[tool.poetry.dev-dependencies] +httpx = "^0.18.2" +coverage = "^5.5" +black = "^21.6b0" +pydocstyle = "^6.1.1" +flake8 = "^3.9.2" + +[tool.pydocstyle] +ignore = ["D203", "D204", "D212"] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/setup.cfg 
b/setup.cfg deleted file mode 100644 index 4a3b956..0000000 --- a/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[pydocstyle] -ignore = D203,D204,D212 From 19ef1f4e93e3ae2d08cc09a568b16b85e5389ebd Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 11:28:13 +0200 Subject: [PATCH 026/184] black --- matrix_webhook.py | 40 ++++++++++++++++++++++------------------ tests/start.py | 35 +++++++++++++++++++++++------------ tests/tests.py | 22 +++++++++++++++------- 3 files changed, 60 insertions(+), 37 deletions(-) diff --git a/matrix_webhook.py b/matrix_webhook.py index e9f2efa..5540f59 100755 --- a/matrix_webhook.py +++ b/matrix_webhook.py @@ -18,11 +18,11 @@ from markdown import markdown from nio import AsyncClient from nio.exceptions import LocalProtocolError -SERVER_ADDRESS = (os.environ.get('HOST', ''), int(os.environ.get('PORT', 4785))) -MATRIX_URL = os.environ.get('MATRIX_URL', 'https://matrix.org') -MATRIX_ID = os.environ.get('MATRIX_ID', '@wwm:matrix.org') -MATRIX_PW = os.environ['MATRIX_PW'] -API_KEY = os.environ['API_KEY'] +SERVER_ADDRESS = (os.environ.get("HOST", ""), int(os.environ.get("PORT", 4785))) +MATRIX_URL = os.environ.get("MATRIX_URL", "https://matrix.org") +MATRIX_ID = os.environ.get("MATRIX_ID", "@wwm:matrix.org") +MATRIX_PW = os.environ["MATRIX_PW"] +API_KEY = os.environ["API_KEY"] CLIENT = AsyncClient(MATRIX_URL, MATRIX_ID) @@ -37,20 +37,22 @@ async def handler(request): try: data = json.loads(data.decode()) except json.decoder.JSONDecodeError: - return create_json_response(HTTPStatus.BAD_REQUEST, 'Invalid JSON') + return create_json_response(HTTPStatus.BAD_REQUEST, "Invalid JSON") - if not all(key in data for key in ['text', 'key']): - return create_json_response(HTTPStatus.BAD_REQUEST, 'Missing text and/or API key property') + if not all(key in data for key in ["text", "key"]): + return create_json_response( + HTTPStatus.BAD_REQUEST, "Missing text and/or API key property" + ) - if data['key'] != API_KEY: - return create_json_response(HTTPStatus.UNAUTHORIZED, 'Invalid API key') + if data["key"] != API_KEY: + return create_json_response(HTTPStatus.UNAUTHORIZED, "Invalid API key") room_id = request.path[1:] content = { - 'msgtype': 'm.text', - 'body': data['text'], - 'format': 'org.matrix.custom.html', - 'formatted_body': markdown(str(data['text']), extensions=['extra']), + "msgtype": "m.text", + "body": data["text"], + "format": "org.matrix.custom.html", + "formatted_body": markdown(str(data["text"]), extensions=["extra"]), } try: await send_room_message(room_id, content) @@ -58,18 +60,20 @@ async def handler(request): await CLIENT.login(MATRIX_PW) await send_room_message(room_id, content) - return create_json_response(HTTPStatus.OK, 'OK') + return create_json_response(HTTPStatus.OK, "OK") def create_json_response(status, ret): """Create a JSON response.""" - response_data = {'status': status, 'ret': ret} + response_data = {"status": status, "ret": ret} return web.json_response(response_data, status=status) async def send_room_message(room_id, content): """Send a message to a room.""" - return await CLIENT.room_send(room_id=room_id, message_type='m.room.message', content=content) + return await CLIENT.room_send( + room_id=room_id, message_type="m.room.message", content=content + ) async def main(event): @@ -113,5 +117,5 @@ def run(): loop.close() -if __name__ == '__main__': +if __name__ == "__main__": run() diff --git a/tests/start.py b/tests/start.py index 97cc0f0..27c184c 100755 --- a/tests/start.py +++ b/tests/start.py @@ -10,21 +10,24 @@ import httpx import 
yaml from synapse._scripts.register_new_matrix_user import request_registration -BOT_URL = 'http://localhost:4785' -KEY, MATRIX_URL, MATRIX_ID, MATRIX_PW = (environ[v] for v in ['API_KEY', 'MATRIX_URL', 'MATRIX_ID', 'MATRIX_PW']) +BOT_URL = "http://localhost:4785" +KEY, MATRIX_URL, MATRIX_ID, MATRIX_PW = ( + environ[v] for v in ["API_KEY", "MATRIX_URL", "MATRIX_ID", "MATRIX_PW"] +) FULL_ID = f'@{MATRIX_ID}:{MATRIX_URL.split("/")[2]}' def bot_req(req=None, key=None, room_id=None): """Bot requests boilerplate.""" if key is not None: - req['key'] = key - url = BOT_URL if room_id is None else f'{BOT_URL}/{room_id}' + req["key"] = key + url = BOT_URL if room_id is None else f"{BOT_URL}/{room_id}" return httpx.post(url, json=req).json() def wait_available(url: str, key: str, timeout: int = 10) -> bool: """Wait until a service answer correctly or timeout.""" + def check_json(url: str, key: str) -> bool: """Ensure a service at a given url answers with valid json containing a certain key.""" try: @@ -44,18 +47,26 @@ def wait_available(url: str, key: str, timeout: int = 10) -> bool: def run_and_test(): """Launch the bot and its tests.""" # Start the server, and wait for it - srv = Popen(['python', '-m', 'synapse.app.homeserver', '--config-path', '/srv/homeserver.yaml']) - if not wait_available(f'{MATRIX_URL}/_matrix/client/r0/login', 'flows'): + srv = Popen( + [ + "python", + "-m", + "synapse.app.homeserver", + "--config-path", + "/srv/homeserver.yaml", + ] + ) + if not wait_available(f"{MATRIX_URL}/_matrix/client/r0/login", "flows"): return False # Register a user for the bot. - with open('/srv/homeserver.yaml') as f: + with open("/srv/homeserver.yaml") as f: secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=True) # Start the bot, and wait for it - bot = Popen(['coverage', 'run', 'matrix_webhook.py']) - if not wait_available(BOT_URL, 'status'): + bot = Popen(["coverage", "run", "matrix_webhook.py"]) + if not wait_available(BOT_URL, "status"): return False # Run the main unittest module @@ -68,10 +79,10 @@ def run_and_test(): bot.terminate() - for cmd in ['report', 'html', 'xml']: - run(['coverage', cmd]) + for cmd in ["report", "html", "xml"]: + run(["coverage", cmd]) return ret -if __name__ == '__main__': +if __name__ == "__main__": exit(not run_and_test()) diff --git a/tests/tests.py b/tests/tests.py index 3bf710e..f6ec6f9 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -9,25 +9,33 @@ from .start import FULL_ID, KEY, MATRIX_ID, MATRIX_PW, MATRIX_URL, bot_req class BotTest(unittest.IsolatedAsyncioTestCase): """Main test class.""" + def test_errors(self): """Check the bot's error paths.""" - self.assertEqual(bot_req(), {'status': 400, 'ret': 'Invalid JSON'}) - self.assertEqual(bot_req({'toto': 3}), {'status': 400, 'ret': 'Missing text and/or API key property'}) - self.assertEqual(bot_req({'text': 3, 'key': None}), {'status': 401, 'ret': 'Invalid API key'}) + self.assertEqual(bot_req(), {"status": 400, "ret": "Invalid JSON"}) + self.assertEqual( + bot_req({"toto": 3}), + {"status": 400, "ret": "Missing text and/or API key property"}, + ) + self.assertEqual( + bot_req({"text": 3, "key": None}), {"status": 401, "ret": "Invalid API key"} + ) # TODO: we are not sending to a real room, so this should not be "OK" - self.assertEqual(bot_req({'text': 3}, KEY), {'status': 200, 'ret': 'OK'}) + self.assertEqual(bot_req({"text": 3}, KEY), {"status": 200, "ret": "OK"}) async def test_message(self): """Send a markdown 
message, and check the result.""" - text = '# Hello' + text = "# Hello" messages = [] client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) await client.login(MATRIX_PW) room = await client.room_create() - self.assertEqual(bot_req({'text': text}, KEY, room.room_id), {'status': 200, 'ret': 'OK'}) + self.assertEqual( + bot_req({"text": text}, KEY, room.room_id), {"status": 200, "ret": "OK"} + ) sync = await client.sync() messages = await client.room_messages(room.room_id, sync.next_batch) @@ -36,4 +44,4 @@ class BotTest(unittest.IsolatedAsyncioTestCase): message = messages.chunk[0] self.assertEqual(message.sender, FULL_ID) self.assertEqual(message.body, text) - self.assertEqual(message.formatted_body, '
<h1>Hello</h1>') + self.assertEqual(message.formatted_body, "<h1>Hello</h1>
") From e2d85eaa21db44252b72814567e70a9eaf679585 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 11:43:01 +0200 Subject: [PATCH 027/184] details --- .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 9 +++------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5d7dfbf..dbeacb5 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,4 +1,4 @@ -name: Lint +name: Lints on: [push, pull_request] jobs: lint: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1e1a427..c71fb8e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,9 +4,6 @@ jobs: tests: runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Tests - run: docker-compose -f test.yml up --exit-code-from tests - - name: Coverage - uses: codecov/codecov-action@v1 + - uses: actions/checkout@v2 + - run: docker-compose -f test.yml up --exit-code-from tests + - uses: codecov/codecov-action@v1 From 2bda1d59688d02969e337c2ad60389c6b042ab47 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 11:54:24 +0200 Subject: [PATCH 028/184] badges --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index 7e6229b..cc927e1 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,10 @@ # Matrix Webhook +![Tests](https://github.com/nim65s/matrix-webhook/actions/workflows/test/badge.svg) +![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint/badge.svg) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) + Post a message to a matrix room with a simple HTTP POST ## Configuration From 044876daf6146929ece10d39f78f7060a12b3e3e Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Tue, 13 Jul 2021 12:28:01 +0200 Subject: [PATCH 029/184] fix badges --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cc927e1..02ebc4d 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Matrix Webhook -![Tests](https://github.com/nim65s/matrix-webhook/actions/workflows/test/badge.svg) -![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint/badge.svg) +[![Tests](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml) +[![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) From 2c8c618fe0e2a3333022d1524f6d3d2c5814923a Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Wed, 14 Jul 2021 17:12:55 +0200 Subject: [PATCH 030/184] setup argparse & logging --- matrix_webhook.py | 84 ++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 75 insertions(+), 9 deletions(-) diff --git a/matrix_webhook.py b/matrix_webhook.py index 5540f59..f754680 100755 --- a/matrix_webhook.py +++ b/matrix_webhook.py @@ -3,12 +3,12 @@ Matrix Webhook. 
Post a message to a matrix room with a simple HTTP POST -v1: matrix-client & http.server -v2: matrix-nio & aiohttp & markdown """ +import argparse import asyncio import json +import logging import os from http import HTTPStatus from signal import SIGINT, SIGTERM @@ -18,12 +18,69 @@ from markdown import markdown from nio import AsyncClient from nio.exceptions import LocalProtocolError -SERVER_ADDRESS = (os.environ.get("HOST", ""), int(os.environ.get("PORT", 4785))) -MATRIX_URL = os.environ.get("MATRIX_URL", "https://matrix.org") -MATRIX_ID = os.environ.get("MATRIX_ID", "@wwm:matrix.org") -MATRIX_PW = os.environ["MATRIX_PW"] -API_KEY = os.environ["API_KEY"] -CLIENT = AsyncClient(MATRIX_URL, MATRIX_ID) +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument( + "-H", + "--host", + default=os.environ.get("HOST", ""), + help="host to listen to. Default: `''`. Environment variable: `HOST`", +) +parser.add_argument( + "-P", + "--port", + type=int, + default=os.environ.get("PORT", 4785), + help="port to listed to. Default: 4785. Environment variable: `PORT`", +) +parser.add_argument( + "-u", + "--matrix-url", + default=os.environ.get("MATRIX_URL", "https://matrix.org"), + help="matrix homeserver url. Default: `https://matrix.org`. Environment variable: `MATRIX_URL`", +) +parser.add_argument( + "-i", + "--matrix-id", + help="matrix user-id. Required. Environment variable: `MATRIX_ID`", + **( + {"default": os.environ["MATRIX_ID"]} + if "MATRIX_ID" in os.environ + else {"required": True} + ), +) +parser.add_argument( + "-p", + "--matrix-pw", + help="matrix password. Required. Environment variable: `MATRIX_PW`", + **( + {"default": os.environ["MATRIX_PW"]} + if "MATRIX_PW" in os.environ + else {"required": True} + ), +) +parser.add_argument( + "-k", + "--api-key", + help="shared secret to use this service. Required. Environment variable: `API_KEY`", + **( + {"default": os.environ["API_KEY"]} + if "API_KEY" in os.environ + else {"required": True} + ), +) +parser.add_argument( + "-v", "--verbose", action="count", default=0, help="increment verbosity level" +) + +args = parser.parse_args() +logging.basicConfig(level=50 - 10 * args.verbose) + +SERVER_ADDRESS = (args.host, args.port) +MATRIX_URL = args.matrix_url +MATRIX_ID = args.matrix_id +MATRIX_PW = args.matrix_pw +API_KEY = args.api_key +CLIENT = AsyncClient(args.matrix_url, args.matrix_id) async def handler(request): @@ -32,6 +89,7 @@ async def handler(request): This one handles a POST, checks its content, and forwards it to the matrix room. 
""" + logging.debug(f"Handling {request=}") data = await request.read() try: @@ -56,7 +114,9 @@ async def handler(request): } try: await send_room_message(room_id, content) - except LocalProtocolError: # Connection lost, try another login + except LocalProtocolError as e: # Connection lost, try another login + logging.error(f"Send error: {e}") + logging.warning("Reconnecting and trying again") await CLIENT.login(MATRIX_PW) await send_room_message(room_id, content) @@ -65,12 +125,14 @@ async def handler(request): def create_json_response(status, ret): """Create a JSON response.""" + logging.debug(f"Creating json response: {status=}, {ret=}") response_data = {"status": status, "ret": ret} return web.json_response(response_data, status=status) async def send_room_message(room_id, content): """Send a message to a room.""" + logging.debug(f"Sending room message in {room_id=}: {content=}") return await CLIENT.room_send( room_id=room_id, message_type="m.room.message", content=content ) @@ -82,11 +144,13 @@ async def main(event): matrix client login & start web server """ + logging.info(f"Log in {MATRIX_ID=} on {MATRIX_URL=}") await CLIENT.login(MATRIX_PW) server = web.Server(handler) runner = web.ServerRunner(server) await runner.setup() + logging.info(f"Binding on {SERVER_ADDRESS=}") site = web.TCPSite(runner, *SERVER_ADDRESS) await site.start() @@ -106,6 +170,7 @@ def terminate(event, signal): def run(): """Launch everything.""" + logging.info("Matrix Webhook starting...") loop = asyncio.get_event_loop() event = asyncio.Event() @@ -114,6 +179,7 @@ def run(): loop.run_until_complete(main(event)) + logging.info("Matrix Webhook closing...") loop.close() From a0a78bbad097a36101d205bf7588758ab00a9832 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Wed, 14 Jul 2021 20:00:52 +0200 Subject: [PATCH 031/184] improve logging --- matrix_webhook.py | 22 ++++++++++++---------- tests/Dockerfile | 2 +- tests/start.py | 20 +++++++++++++++++++- 3 files changed, 32 insertions(+), 12 deletions(-) diff --git a/matrix_webhook.py b/matrix_webhook.py index f754680..c52fe43 100755 --- a/matrix_webhook.py +++ b/matrix_webhook.py @@ -73,7 +73,6 @@ parser.add_argument( ) args = parser.parse_args() -logging.basicConfig(level=50 - 10 * args.verbose) SERVER_ADDRESS = (args.host, args.port) MATRIX_URL = args.matrix_url @@ -81,6 +80,7 @@ MATRIX_ID = args.matrix_id MATRIX_PW = args.matrix_pw API_KEY = args.api_key CLIENT = AsyncClient(args.matrix_url, args.matrix_id) +LOGGER = logging.getLogger("matrix-webhook") async def handler(request): @@ -89,7 +89,7 @@ async def handler(request): This one handles a POST, checks its content, and forwards it to the matrix room. 
""" - logging.debug(f"Handling {request=}") + LOGGER.debug(f"Handling {request=}") data = await request.read() try: @@ -115,8 +115,8 @@ async def handler(request): try: await send_room_message(room_id, content) except LocalProtocolError as e: # Connection lost, try another login - logging.error(f"Send error: {e}") - logging.warning("Reconnecting and trying again") + LOGGER.error(f"Send error: {e}") + LOGGER.warning("Reconnecting and trying again") await CLIENT.login(MATRIX_PW) await send_room_message(room_id, content) @@ -125,14 +125,14 @@ async def handler(request): def create_json_response(status, ret): """Create a JSON response.""" - logging.debug(f"Creating json response: {status=}, {ret=}") + LOGGER.debug(f"Creating json response: {status=}, {ret=}") response_data = {"status": status, "ret": ret} return web.json_response(response_data, status=status) async def send_room_message(room_id, content): """Send a message to a room.""" - logging.debug(f"Sending room message in {room_id=}: {content=}") + LOGGER.debug(f"Sending room message in {room_id=}: {content=}") return await CLIENT.room_send( room_id=room_id, message_type="m.room.message", content=content ) @@ -144,13 +144,13 @@ async def main(event): matrix client login & start web server """ - logging.info(f"Log in {MATRIX_ID=} on {MATRIX_URL=}") + LOGGER.info(f"Log in {MATRIX_ID=} on {MATRIX_URL=}") await CLIENT.login(MATRIX_PW) server = web.Server(handler) runner = web.ServerRunner(server) await runner.setup() - logging.info(f"Binding on {SERVER_ADDRESS=}") + LOGGER.info(f"Binding on {SERVER_ADDRESS=}") site = web.TCPSite(runner, *SERVER_ADDRESS) await site.start() @@ -170,7 +170,7 @@ def terminate(event, signal): def run(): """Launch everything.""" - logging.info("Matrix Webhook starting...") + LOGGER.info("Starting...") loop = asyncio.get_event_loop() event = asyncio.Event() @@ -179,9 +179,11 @@ def run(): loop.run_until_complete(main(event)) - logging.info("Matrix Webhook closing...") + LOGGER.info("Closing...") loop.close() if __name__ == "__main__": + log_format = "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s" + logging.basicConfig(level=50 - 10 * args.verbose, format=log_format) run() diff --git a/tests/Dockerfile b/tests/Dockerfile index 0ff4cb2..42b60b9 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -17,4 +17,4 @@ RUN pip install --no-cache-dir markdown matrix-nio httpx coverage WORKDIR /app -CMD ./tests/start.py +CMD ./tests/start.py -vvv diff --git a/tests/start.py b/tests/start.py index 27c184c..3124927 100755 --- a/tests/start.py +++ b/tests/start.py @@ -1,6 +1,8 @@ #!/usr/bin/env python """Entry point to start an instrumentalized bot for coverage and run tests.""" +import argparse +import logging from os import environ from subprocess import Popen, run from time import time @@ -15,6 +17,12 @@ KEY, MATRIX_URL, MATRIX_ID, MATRIX_PW = ( environ[v] for v in ["API_KEY", "MATRIX_URL", "MATRIX_ID", "MATRIX_PW"] ) FULL_ID = f'@{MATRIX_ID}:{MATRIX_URL.split("/")[2]}' +LOGGER = logging.getLogger("matrix-webhook.tests.start") + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument( + "-v", "--verbose", action="count", default=0, help="increment verbosity level" +) def bot_req(req=None, key=None, room_id=None): @@ -47,6 +55,7 @@ def wait_available(url: str, key: str, timeout: int = 10) -> bool: def run_and_test(): """Launch the bot and its tests.""" # Start the server, and wait for it + LOGGER.info("Spawning synapse") srv = Popen( [ "python", @@ -60,29 +69,38 @@ def run_and_test(): 
return False # Register a user for the bot. + LOGGER.info("Registering the bot") with open("/srv/homeserver.yaml") as f: secret = yaml.safe_load(f.read()).get("registration_shared_secret", None) request_registration(MATRIX_ID, MATRIX_PW, MATRIX_URL, secret, admin=True) # Start the bot, and wait for it - bot = Popen(["coverage", "run", "matrix_webhook.py"]) + LOGGER.info("Spawning the bot") + bot = Popen(["coverage", "run", "matrix_webhook.py", "-vvvvv"]) if not wait_available(BOT_URL, "status"): return False # Run the main unittest module + LOGGER.info("Runnig unittests") ret = main(module=None, exit=False).result.wasSuccessful() + LOGGER.info("Stopping synapse") srv.terminate() # TODO Check what the bot says when the server is offline # print(bot_req({'text': 'bye'}, KEY), {'status': 200, 'ret': 'OK'}) + LOGGER.info("Stopping the bot") bot.terminate() + LOGGER.info("Processing coverage") for cmd in ["report", "html", "xml"]: run(["coverage", cmd]) return ret if __name__ == "__main__": + args = parser.parse_args() + log_format = "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s" + logging.basicConfig(level=50 - 10 * args.verbose, format=log_format) exit(not run_and_test()) From 0c0a42a4c91e69252523c854197d9a75298b9e29 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Wed, 14 Jul 2021 23:25:24 +0200 Subject: [PATCH 032/184] improve error management and tests --- matrix_webhook.py | 29 ++++++++++++++++++++++------- tests/tests.py | 20 +++++++++++++++++--- 2 files changed, 39 insertions(+), 10 deletions(-) diff --git a/matrix_webhook.py b/matrix_webhook.py index c52fe43..70d3234 100755 --- a/matrix_webhook.py +++ b/matrix_webhook.py @@ -17,6 +17,7 @@ from aiohttp import web from markdown import markdown from nio import AsyncClient from nio.exceptions import LocalProtocolError +from nio.responses import RoomSendError parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( @@ -81,6 +82,7 @@ MATRIX_PW = args.matrix_pw API_KEY = args.api_key CLIENT = AsyncClient(args.matrix_url, args.matrix_id) LOGGER = logging.getLogger("matrix-webhook") +ERROR_MAP = {"M_FORBIDDEN": HTTPStatus.FORBIDDEN} async def handler(request): @@ -112,13 +114,26 @@ async def handler(request): "format": "org.matrix.custom.html", "formatted_body": markdown(str(data["text"]), extensions=["extra"]), } - try: - await send_room_message(room_id, content) - except LocalProtocolError as e: # Connection lost, try another login - LOGGER.error(f"Send error: {e}") - LOGGER.warning("Reconnecting and trying again") - await CLIENT.login(MATRIX_PW) - await send_room_message(room_id, content) + for _ in range(10): + try: + resp = await send_room_message(room_id, content) + if isinstance(resp, RoomSendError): + if resp.status_code == "M_UNKNOWN_TOKEN": + LOGGER.warning("Reconnecting") + await CLIENT.login(MATRIX_PW) + else: + return create_json_response( + ERROR_MAP[resp.status_code], resp.message + ) + else: + break + except LocalProtocolError as e: + LOGGER.error(f"Send error: {e}") + LOGGER.warning("Trying again") + else: + return create_json_response( + HTTPStatus.GATEWAY_TIMEOUT, "Homeserver not responding" + ) return create_json_response(HTTPStatus.OK, "OK") diff --git a/tests/tests.py b/tests/tests.py index f6ec6f9..c92fb95 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -20,9 +20,11 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual( bot_req({"text": 3, "key": None}), {"status": 401, "ret": "Invalid API key"} ) - - # TODO: we are not sending to a real room, so this should not 
be "OK" - self.assertEqual(bot_req({"text": 3}, KEY), {"status": 200, "ret": "OK"}) + # TODO: if the client from matrix_webhook has olm support, this won't be a 403 from synapse, + # but a LocalProtocolError from matrix_webhook + self.assertEqual( + bot_req({"text": 3}, KEY), {"status": 403, "ret": "Unknown room"} + ) async def test_message(self): """Send a markdown message, and check the result.""" @@ -45,3 +47,15 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(message.sender, FULL_ID) self.assertEqual(message.body, text) self.assertEqual(message.formatted_body, "
<h1>Hello</h1>
") + + async def test_reconnect(self): + """Check the reconnecting path.""" + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + await client.login(MATRIX_PW) + room = await client.room_create() + await client.logout(all_devices=True) + await client.close() + self.assertEqual( + bot_req({"text": "Re"}, KEY, room.room_id), + {"status": 200, "ret": "OK"}, + ) From a974f073c96f645f79afee543a3b2aefe1bac74b Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 16:52:39 +0200 Subject: [PATCH 033/184] split code --- Dockerfile | 4 +- .../__main__.py | 82 +++---------------- matrix_webhook/conf.py | 66 +++++++++++++++ tests/start.py | 2 +- 4 files changed, 80 insertions(+), 74 deletions(-) rename matrix_webhook.py => matrix_webhook/__main__.py (63%) create mode 100644 matrix_webhook/conf.py diff --git a/Dockerfile b/Dockerfile index da6ffa2..688b216 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,6 +4,6 @@ EXPOSE 4785 RUN pip install --no-cache-dir markdown matrix-nio -ADD matrix_webhook.py / +ADD matrix_webhook . -CMD /matrix_webhook.py +CMD python -m matrix_webhook diff --git a/matrix_webhook.py b/matrix_webhook/__main__.py similarity index 63% rename from matrix_webhook.py rename to matrix_webhook/__main__.py index 70d3234..7b7db33 100755 --- a/matrix_webhook.py +++ b/matrix_webhook/__main__.py @@ -5,11 +5,9 @@ Matrix Webhook. Post a message to a matrix room with a simple HTTP POST """ -import argparse import asyncio import json import logging -import os from http import HTTPStatus from signal import SIGINT, SIGTERM @@ -19,71 +17,13 @@ from nio import AsyncClient from nio.exceptions import LocalProtocolError from nio.responses import RoomSendError -parser = argparse.ArgumentParser(description=__doc__) -parser.add_argument( - "-H", - "--host", - default=os.environ.get("HOST", ""), - help="host to listen to. Default: `''`. Environment variable: `HOST`", -) -parser.add_argument( - "-P", - "--port", - type=int, - default=os.environ.get("PORT", 4785), - help="port to listed to. Default: 4785. Environment variable: `PORT`", -) -parser.add_argument( - "-u", - "--matrix-url", - default=os.environ.get("MATRIX_URL", "https://matrix.org"), - help="matrix homeserver url. Default: `https://matrix.org`. Environment variable: `MATRIX_URL`", -) -parser.add_argument( - "-i", - "--matrix-id", - help="matrix user-id. Required. Environment variable: `MATRIX_ID`", - **( - {"default": os.environ["MATRIX_ID"]} - if "MATRIX_ID" in os.environ - else {"required": True} - ), -) -parser.add_argument( - "-p", - "--matrix-pw", - help="matrix password. Required. Environment variable: `MATRIX_PW`", - **( - {"default": os.environ["MATRIX_PW"]} - if "MATRIX_PW" in os.environ - else {"required": True} - ), -) -parser.add_argument( - "-k", - "--api-key", - help="shared secret to use this service. Required. Environment variable: `API_KEY`", - **( - {"default": os.environ["API_KEY"]} - if "API_KEY" in os.environ - else {"required": True} - ), -) -parser.add_argument( - "-v", "--verbose", action="count", default=0, help="increment verbosity level" -) +from . 
import conf -args = parser.parse_args() - -SERVER_ADDRESS = (args.host, args.port) -MATRIX_URL = args.matrix_url -MATRIX_ID = args.matrix_id -MATRIX_PW = args.matrix_pw -API_KEY = args.api_key -CLIENT = AsyncClient(args.matrix_url, args.matrix_id) -LOGGER = logging.getLogger("matrix-webhook") ERROR_MAP = {"M_FORBIDDEN": HTTPStatus.FORBIDDEN} +CLIENT = AsyncClient(conf.MATRIX_URL, conf.MATRIX_ID) +LOGGER = logging.getLogger("matrix-webhook") + async def handler(request): """ @@ -104,7 +44,7 @@ async def handler(request): HTTPStatus.BAD_REQUEST, "Missing text and/or API key property" ) - if data["key"] != API_KEY: + if data["key"] != conf.API_KEY: return create_json_response(HTTPStatus.UNAUTHORIZED, "Invalid API key") room_id = request.path[1:] @@ -120,7 +60,7 @@ async def handler(request): if isinstance(resp, RoomSendError): if resp.status_code == "M_UNKNOWN_TOKEN": LOGGER.warning("Reconnecting") - await CLIENT.login(MATRIX_PW) + await CLIENT.login(conf.MATRIX_PW) else: return create_json_response( ERROR_MAP[resp.status_code], resp.message @@ -159,14 +99,14 @@ async def main(event): matrix client login & start web server """ - LOGGER.info(f"Log in {MATRIX_ID=} on {MATRIX_URL=}") - await CLIENT.login(MATRIX_PW) + LOGGER.info(f"Log in {conf.MATRIX_ID=} on {conf.MATRIX_URL=}") + await CLIENT.login(conf.MATRIX_PW) server = web.Server(handler) runner = web.ServerRunner(server) await runner.setup() - LOGGER.info(f"Binding on {SERVER_ADDRESS=}") - site = web.TCPSite(runner, *SERVER_ADDRESS) + LOGGER.info(f"Binding on {conf.SERVER_ADDRESS=}") + site = web.TCPSite(runner, *conf.SERVER_ADDRESS) await site.start() # Run until we get a shutdown request @@ -200,5 +140,5 @@ def run(): if __name__ == "__main__": log_format = "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s" - logging.basicConfig(level=50 - 10 * args.verbose, format=log_format) + logging.basicConfig(level=50 - 10 * conf.VERBOSE, format=log_format) run() diff --git a/matrix_webhook/conf.py b/matrix_webhook/conf.py new file mode 100644 index 0000000..ae81672 --- /dev/null +++ b/matrix_webhook/conf.py @@ -0,0 +1,66 @@ +"""Configuration for Matrix Webhook.""" +import argparse +import os + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument( + "-H", + "--host", + default=os.environ.get("HOST", ""), + help="host to listen to. Default: `''`. Environment variable: `HOST`", +) +parser.add_argument( + "-P", + "--port", + type=int, + default=os.environ.get("PORT", 4785), + help="port to listed to. Default: 4785. Environment variable: `PORT`", +) +parser.add_argument( + "-u", + "--matrix-url", + default=os.environ.get("MATRIX_URL", "https://matrix.org"), + help="matrix homeserver url. Default: `https://matrix.org`. Environment variable: `MATRIX_URL`", +) +parser.add_argument( + "-i", + "--matrix-id", + help="matrix user-id. Required. Environment variable: `MATRIX_ID`", + **( + {"default": os.environ["MATRIX_ID"]} + if "MATRIX_ID" in os.environ + else {"required": True} + ), +) +parser.add_argument( + "-p", + "--matrix-pw", + help="matrix password. Required. Environment variable: `MATRIX_PW`", + **( + {"default": os.environ["MATRIX_PW"]} + if "MATRIX_PW" in os.environ + else {"required": True} + ), +) +parser.add_argument( + "-k", + "--api-key", + help="shared secret to use this service. Required. 
Environment variable: `API_KEY`", + **( + {"default": os.environ["API_KEY"]} + if "API_KEY" in os.environ + else {"required": True} + ), +) +parser.add_argument( + "-v", "--verbose", action="count", default=0, help="increment verbosity level" +) + +args = parser.parse_args() + +SERVER_ADDRESS = (args.host, args.port) +MATRIX_URL = args.matrix_url +MATRIX_ID = args.matrix_id +MATRIX_PW = args.matrix_pw +API_KEY = args.api_key +VERBOSE = args.verbose diff --git a/tests/start.py b/tests/start.py index 3124927..3df3418 100755 --- a/tests/start.py +++ b/tests/start.py @@ -76,7 +76,7 @@ def run_and_test(): # Start the bot, and wait for it LOGGER.info("Spawning the bot") - bot = Popen(["coverage", "run", "matrix_webhook.py", "-vvvvv"]) + bot = Popen(["coverage", "run", "-m", "matrix_webhook", "-vvvvv"]) if not wait_available(BOT_URL, "status"): return False From bc646b9f4a3311311a7637aa7389a88ee0fd64f9 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 16:55:23 +0200 Subject: [PATCH 034/184] update year --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 939f05c..8960340 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2019-2020 tetaneutral.net All rights reserved. +Copyright (c) 2019-2021 tetaneutral.net All rights reserved. BSD 2 Clause License From 00997360ebf8b43161ab541d6e0e6e2b69960fce Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 16:55:54 +0200 Subject: [PATCH 035/184] fix permissions --- matrix_webhook/__main__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 matrix_webhook/__main__.py diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py old mode 100755 new mode 100644 From 922ebf5c7807569b6141e4fc731d65e87d92cbea Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 16:56:59 +0200 Subject: [PATCH 036/184] poetry update --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index fcf852f..c600d93 100644 --- a/poetry.lock +++ b/poetry.lock @@ -87,7 +87,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "black" -version = "21.6b0" +version = "21.7b0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -99,7 +99,7 @@ click = ">=7.1.2" mypy-extensions = ">=0.4.3" pathspec = ">=0.8.1,<1" regex = ">=2020.1.8" -toml = ">=0.10.1" +tomli = ">=0.2.6,<2.0.0" [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -301,7 +301,7 @@ testing = ["coverage", "pyyaml"] [[package]] name = "matrix-nio" -version = "0.18.3" +version = "0.18.4" description = "A Python Matrix client library, designed according to sans I/O principles." category = "main" optional = false @@ -320,7 +320,7 @@ pycryptodome = ">=3.10.1,<4.0.0" unpaddedbase64 = ">=2.1.0,<3.0.0" [package.extras] -e2e = ["python-olm (>=3.1.3,<4.0.0)", "peewee (>=3.14.4,<4.0.0)", "cachetools (>=4.2.1,<5.0.0)", "atomicwrites (>=1.4.0,<2.0.0)"] +e2e = ["atomicwrites (>=1.4.0,<2.0.0)", "cachetools (>=4.2.1,<5.0.0)", "peewee (>=3.14.4,<4.0.0)", "python-olm (>=3.1.3,<4.0.0)"] [[package]] name = "mccabe" @@ -348,11 +348,11 @@ python-versions = "*" [[package]] name = "pathspec" -version = "0.8.1" +version = "0.9.0" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "pycodestyle" @@ -463,12 +463,12 @@ optional = false python-versions = "*" [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" +name = "tomli" +version = "1.0.4" +description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" [[package]] name = "typing-extensions" @@ -568,8 +568,8 @@ attrs = [ {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] black = [ - {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, - {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, + {file = "black-21.7b0-py3-none-any.whl", hash = "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116"}, + {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"}, ] certifi = [ {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, @@ -696,8 +696,8 @@ markdown = [ {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, ] matrix-nio = [ - {file = "matrix-nio-0.18.3.tar.gz", hash = "sha256:7f2e92f5b219367e47824bfe8bd2b1a06ce83ae28956f112dd3c2112a4d27085"}, - {file = "matrix_nio-0.18.3-py3-none-any.whl", hash = "sha256:a28653f96760b045c7edc53b645872cf2facc1639dc8cf56d748cd5e54ed2d3d"}, + {file = "matrix-nio-0.18.4.tar.gz", hash = "sha256:e5f0a62ff66474f5c56dc40c3eb3c74a29943800589ae6947ea224c288f3ab41"}, + {file = "matrix_nio-0.18.4-py3-none-any.whl", hash = "sha256:7ea00ae362a3621624b8ff463a2b06cb945ffa12e2f3919cae5321d06285a361"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -747,8 +747,8 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, @@ -880,9 +880,9 @@ snowballstemmer = [ {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, ] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = 
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +tomli = [ + {file = "tomli-1.0.4-py3-none-any.whl", hash = "sha256:0713b16ff91df8638a6a694e295c8159ab35ba93e3424a626dd5226d386057be"}, + {file = "tomli-1.0.4.tar.gz", hash = "sha256:be670d0d8d7570fd0ea0113bd7bb1ba3ac6706b4de062cc4c952769355c9c268"}, ] typing-extensions = [ {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, From c9045d407d89776ebc49b7c98e928833179e115b Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 18:12:08 +0200 Subject: [PATCH 037/184] update README --- Dockerfile | 4 +-- README.md | 62 ++++++++++++++++++++++++++++++-------- matrix_webhook/__main__.py | 1 - matrix_webhook/conf.py | 2 +- 4 files changed, 53 insertions(+), 16 deletions(-) diff --git a/Dockerfile b/Dockerfile index 688b216..2eb8198 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,6 +4,6 @@ EXPOSE 4785 RUN pip install --no-cache-dir markdown matrix-nio -ADD matrix_webhook . +ADD matrix_webhook matrix_webhook -CMD python -m matrix_webhook +ENTRYPOINT ["python", "-m", "matrix_webhook"] diff --git a/README.md b/README.md index 02ebc4d..d9517ed 100644 --- a/README.md +++ b/README.md @@ -7,27 +7,59 @@ Post a message to a matrix room with a simple HTTP POST -## Configuration +## Install -Create a matrix user for the bot, make it join the rooms you want it to talk into, and then set the following -environment variables: +``` +python3 -m pip install matrix-webhook +# OR +docker pull nim65s/matrix-webhook +``` + +## Start + +Create a matrix user for the bot, make it join the rooms you want it to talk into, and launch it with the following +arguments or environment variables: + +``` +python -m matrix_webhook -h +# OR +docker run --rm -it nim65s/matrix-webhook -h +``` + +``` +usage: python -m matrix_webhook [-h] [-H HOST] [-P PORT] [-u MATRIX_URL] -i MATRIX_ID -p MATRIX_PW -k API_KEY [-v] + +Configuration for Matrix Webhook. + + +optional arguments: + -h, --help show this help message and exit + -H HOST, --host HOST host to listen to. Default: `''`. Environment variable: `HOST` + -P PORT, --port PORT port to listed to. Default: 4785. Environment variable: `PORT` + -u MATRIX_URL, --matrix-url MATRIX_URL + matrix homeserver url. Default: `https://matrix.org`. Environment variable: `MATRIX_URL` + -i MATRIX_ID, --matrix-id MATRIX_ID + matrix user-id. Required. Environment variable: `MATRIX_ID` + -p MATRIX_PW, --matrix-pw MATRIX_PW + matrix password. Required. Environment variable: `MATRIX_PW` + -k API_KEY, --api-key API_KEY + shared secret to use this service. Required. Environment variable: `API_KEY` + -v, --verbose increment verbosity level +``` -- `MATRIX_URL`: the url of the matrix homeserver -- `MATRIX_ID`: the user id of the bot on this server -- `MATRIX_PW`: the password for this user -- `API_KEY`: a secret to share with the users of the service -- `HOST`: HOST to listen on, all interfaces if `''` (default). -- `PORT`: PORT to listed on, default to 4785. ## Dev ``` -pip3 install --user markdown matrix-nio -./matrix_webhook.py +poetry install +# or python3 -m pip install --user markdown matrix-nio +python3 -m matrix_webhook ``` ## Prod +A `docker-compose.yml` is provided: + - Use [Traefik](https://traefik.io/) on the `web` docker network, eg. 
with [proxyta.net](https://framagit.org/oxyta.net/proxyta.net) - Put the configuration into a `.env` file @@ -47,4 +79,10 @@ curl -d '{"text":"new contrib from toto: [44](http://radio.localhost/map/#44)", ## Test room -[#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org?via=laas.fr&via=tetaneutral.net&via=aen.im) +#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org?via=laas.fr&via=tetaneutral.net&via=aen.im) + +## Unit tests + +``` +docker-compose -f test.yml up --exit-code-from tests --force-recreate --build +``` diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index 7b7db33..2b4a7d7 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """ Matrix Webhook. diff --git a/matrix_webhook/conf.py b/matrix_webhook/conf.py index ae81672..fd39e02 100644 --- a/matrix_webhook/conf.py +++ b/matrix_webhook/conf.py @@ -2,7 +2,7 @@ import argparse import os -parser = argparse.ArgumentParser(description=__doc__) +parser = argparse.ArgumentParser(description=__doc__, prog="python -m matrix_webhook") parser.add_argument( "-H", "--host", From febf2f857c96c9569dd99516b4be04feaa7bc5c0 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 18:41:20 +0200 Subject: [PATCH 038/184] add changelog --- CHANGELOG.md | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..9dcb006 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,33 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [Unreleased] + +- Simplify code + in [#1](https://github.com/nim65s/matrix-webhook/pull/1) + by [@homeworkprod](https://github.com/homeworkprod) +- Update aiohttp use and docs + in [#5](https://github.com/nim65s/matrix-webhook/pull/5) + by [@svenseeberg](https://github.com/svenseeberg) +- Setup Tests, Coverage & CI ; update tooling + in [#7](https://github.com/nim65s/matrix-webhook/pull/7) + by [@nim65s](https://github.com/nim65s) +- Setup argparse & logging + in [#8](https://github.com/nim65s/matrix-webhook/pull/8) + by [@nim65s](https://github.com/nim65s) +- Setup packaging + in [#9](https://github.com/nim65s/matrix-webhook/pull/9) + by [@nim65s](https://github.com/nim65s) + +## [1.0.0] - 2020-03-14 +- Update to matrix-nio & aiohttp & markdown + +## [1.0.0] - 2020-02-14 +- First release with matrix-client & http.server + +[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...devel +[2.0.0]: https://github.com/nim65s/matrix-webhook/compare/v1.0.0...v2.0.0 +[1.0.0]: https://github.com/nim65s/matrix-webhook/releases/tag/v1.0.0 From 8a3bbef54c3b407b348f033808b5859d6da71b82 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 18:59:16 +0200 Subject: [PATCH 039/184] v3.0.0 --- CHANGELOG.md | 4 +++- pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9dcb006..41eb596 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [3.0.0] - 2021-07-18 + - Simplify code in [#1](https://github.com/nim65s/matrix-webhook/pull/1) by [@homeworkprod](https://github.com/homeworkprod) @@ -22,7 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 in [#9](https://github.com/nim65s/matrix-webhook/pull/9) by [@nim65s](https://github.com/nim65s) -## [1.0.0] - 2020-03-14 +## [2.0.0] - 2020-03-14 - Update to matrix-nio & aiohttp & markdown ## [1.0.0] - 2020-02-14 diff --git a/pyproject.toml b/pyproject.toml index 8c2ae72..7f109e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "matrix-webhook" -version = "2.1.0" +version = "3.0.0" description = "Post a message to a matrix room with a simple HTTP POST" authors = ["Guilhem Saurel "] license = "BSD-2-Clause" From fbcae98390d50b6f999a05ec7b9e330b715878d8 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 19:31:46 +0200 Subject: [PATCH 040/184] add metadata on PyPI through pyproject.toml --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 7f109e9..1256ae8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,9 @@ version = "3.0.0" description = "Post a message to a matrix room with a simple HTTP POST" authors = ["Guilhem Saurel "] license = "BSD-2-Clause" +readme = "README.md" +homepage = "https://github.com/nim65s/matrix-webhook" +repository = "https://github.com/nim65s/matrix-webhook.git" [tool.poetry.dependencies] python = "^3.9" From dcc73dfc81639d408fb3727bec053d0a514a57a2 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 19:55:15 +0200 Subject: [PATCH 041/184] setup action to build/publish on docker hub --- .github/workflows/publish.yml | 29 +++++++++++++++++++++++++++++ README.md | 1 + 2 files changed, 30 insertions(+) create mode 100644 .github/workflows/publish.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..0ed948c 
--- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,29 @@ +name: Publish Docker image + +on: + push: + branches: + - 'master' + - 'devel' + tags: + - 'v*' + +jobs: + docker-hub: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: docker/metadata-action@v3 + id: meta + with: + images: nim65s/matrix-webhook + - uses: docker/login-action@v1 + with: + username: nim65s + password: ${{ secrets.DOCKERHUB_TOKEN }} + - uses: docker/build-push-action@v2 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/README.md b/README.md index d9517ed..29a19cc 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ [![Tests](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml) [![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml) +[![Publish](https://github.com/nim65s/matrix-webhook/actions/workflows/publish.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/publish.yml) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) From 932965c8af3be1dcb12596622bbafbd68d8bfcd1 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 21:09:51 +0200 Subject: [PATCH 042/184] setup action to build/publish on PyPI --- .github/workflows/{publish.yml => docker-hub.yml} | 3 +-- .github/workflows/pypi.yml | 15 +++++++++++++++ README.md | 3 ++- pyproject.toml | 2 +- 4 files changed, 19 insertions(+), 4 deletions(-) rename .github/workflows/{publish.yml => docker-hub.yml} (93%) create mode 100644 .github/workflows/pypi.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/docker-hub.yml similarity index 93% rename from .github/workflows/publish.yml rename to .github/workflows/docker-hub.yml index 0ed948c..8fa86ac 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/docker-hub.yml @@ -1,10 +1,9 @@ -name: Publish Docker image +name: Publish on: push: branches: - 'master' - - 'devel' tags: - 'v*' diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml new file mode 100644 index 0000000..ee77ec4 --- /dev/null +++ b/.github/workflows/pypi.yml @@ -0,0 +1,15 @@ +name: PyPI + +on: + push: + tags: + - 'v*' + +jobs: + pypi: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - run: pip install -U poetry twine wheel + - run: poetry build + - run: twine upload --non-interactive -u __token__ -p ${{ secrets.PYPI_TOKEN }} dist/* diff --git a/README.md b/README.md index 29a19cc..a0fe1fe 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,8 @@ [![Tests](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml) [![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml) -[![Publish](https://github.com/nim65s/matrix-webhook/actions/workflows/publish.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/publish.yml) 
+[![Docker-Hub](https://github.com/nim65s/matrix-webhook/actions/workflows/docker-hub.yml/badge.svg)](https://hub.docker.com/r/nim65s/matrix-webhook) +[![PyPI](https://github.com/nim65s/matrix-webhook/actions/workflows/pypi.yml/badge.svg)](https://pypi.org/project/matrix-webhook/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) diff --git a/pyproject.toml b/pyproject.toml index 1256ae8..b85b4ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ homepage = "https://github.com/nim65s/matrix-webhook" repository = "https://github.com/nim65s/matrix-webhook.git" [tool.poetry.dependencies] -python = "^3.9" +python = "^3.8" Markdown = "^3.3.4" matrix-nio = "^0.18.3" From 71c9c6cb0e7bb4ad11bacfda7c65f5b88f8367b9 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 22:53:29 +0200 Subject: [PATCH 043/184] changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 41eb596..b8dd191 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +- Publish on PyPI & Docker Hub with Github Actions + in [#10](https://github.com/nim65s/matrix-webhook/pull/10) + by [@nim65s](https://github.com/) + ## [3.0.0] - 2021-07-18 - Simplify code From a6b192fbd7b957002d995317f3229275d10b605e Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 22:56:27 +0200 Subject: [PATCH 044/184] v3.1.0 --- CHANGELOG.md | 2 ++ pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b8dd191..e5f6e7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [3.1.0] - 2021-07-18 + - Publish on PyPI & Docker Hub with Github Actions in [#10](https://github.com/nim65s/matrix-webhook/pull/10) by [@nim65s](https://github.com/) diff --git a/pyproject.toml b/pyproject.toml index b85b4ca..4e38783 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "matrix-webhook" -version = "3.0.0" +version = "3.1.0" description = "Post a message to a matrix room with a simple HTTP POST" authors = ["Guilhem Saurel "] license = "BSD-2-Clause" From c86145f794c8cd45777b06a3fdfba6743579debb Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 22:59:49 +0200 Subject: [PATCH 045/184] v3.1.1 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4e38783..ab1b583 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "matrix-webhook" -version = "3.1.0" +version = "3.1.1" description = "Post a message to a matrix room with a simple HTTP POST" authors = ["Guilhem Saurel "] license = "BSD-2-Clause" From 8b32c972b8e97e37b6a948e7478f80fa96b5334b Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 18 Jul 2021 23:08:40 +0200 Subject: [PATCH 046/184] typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a0fe1fe..29a96ea 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,7 @@ curl -d '{"text":"new contrib from toto: [44](http://radio.localhost/map/#44)", ## Test room 
-#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org?via=laas.fr&via=tetaneutral.net&via=aen.im) +[#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org?via=laas.fr&via=tetaneutral.net&via=aen.im) ## Unit tests From 292d77274d84893c05d48cf101fa2d190944a919 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 11:21:29 +0200 Subject: [PATCH 047/184] update "text" key to "body" --- CHANGELOG.md | 2 ++ README.md | 2 +- matrix_webhook/__main__.py | 12 ++++++++---- tests/start.py | 2 +- tests/tests.py | 32 +++++++++++++++++++++++++++----- 5 files changed, 39 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e5f6e7b..5c55982 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +- update "text" key to "body". + ## [3.1.0] - 2021-07-18 - Publish on PyPI & Docker Hub with Github Actions diff --git a/README.md b/README.md index 29a96ea..9f2442a 100644 --- a/README.md +++ b/README.md @@ -74,7 +74,7 @@ docker-compose up -d ## Test / Usage ``` -curl -d '{"text":"new contrib from toto: [44](http://radio.localhost/map/#44)", "key": "secret"}' \ +curl -d '{"body":"new contrib from toto: [44](http://radio.localhost/map/#44)", "key": "secret"}' \ 'http://matrixwebhook.localhost/!DPrUlnwOhBEfYwsDLh:matrix.org' ``` (or localhost:4785 without docker) diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index 2b4a7d7..2977f72 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -38,9 +38,13 @@ async def handler(request): except json.decoder.JSONDecodeError: return create_json_response(HTTPStatus.BAD_REQUEST, "Invalid JSON") - if not all(key in data for key in ["text", "key"]): + # legacy naming: + if "text" in data and "body" not in data: + data["body"] = data["text"] + + if not all(key in data for key in ["body", "key"]): return create_json_response( - HTTPStatus.BAD_REQUEST, "Missing text and/or API key property" + HTTPStatus.BAD_REQUEST, "Missing body and/or API key property" ) if data["key"] != conf.API_KEY: @@ -49,9 +53,9 @@ async def handler(request): room_id = request.path[1:] content = { "msgtype": "m.text", - "body": data["text"], + "body": data["body"], "format": "org.matrix.custom.html", - "formatted_body": markdown(str(data["text"]), extensions=["extra"]), + "formatted_body": markdown(str(data["body"]), extensions=["extra"]), } for _ in range(10): try: diff --git a/tests/start.py b/tests/start.py index 3df3418..3acb3b5 100755 --- a/tests/start.py +++ b/tests/start.py @@ -88,7 +88,7 @@ def run_and_test(): srv.terminate() # TODO Check what the bot says when the server is offline - # print(bot_req({'text': 'bye'}, KEY), {'status': 200, 'ret': 'OK'}) + # print(bot_req({'data': 'bye'}, KEY), {'status': 200, 'ret': 'OK'}) LOGGER.info("Stopping the bot") bot.terminate() diff --git a/tests/tests.py b/tests/tests.py index c92fb95..096dace 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -15,19 +15,19 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(bot_req(), {"status": 400, "ret": "Invalid JSON"}) self.assertEqual( bot_req({"toto": 3}), - {"status": 400, "ret": "Missing text and/or API key property"}, + {"status": 400, "ret": "Missing body and/or API key property"}, ) self.assertEqual( - bot_req({"text": 3, "key": None}), {"status": 401, "ret": "Invalid API key"} + bot_req({"body": 3, "key": None}), {"status": 401, "ret": "Invalid API key"} 
) # TODO: if the client from matrix_webhook has olm support, this won't be a 403 from synapse, # but a LocalProtocolError from matrix_webhook self.assertEqual( - bot_req({"text": 3}, KEY), {"status": 403, "ret": "Unknown room"} + bot_req({"body": 3}, KEY), {"status": 403, "ret": "Unknown room"} ) async def test_message(self): - """Send a markdown message, and check the result.""" + """Send a markdown message with the old format, and check the result.""" text = "# Hello" messages = [] client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) @@ -48,6 +48,28 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(message.body, text) self.assertEqual(message.formatted_body, "
<h1>Hello</h1>
") + async def test_markdown_body(self): + """Send a markdown message, and check the result.""" + body = "# Hello" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + self.assertEqual( + bot_req({"body": body}, KEY, room.room_id), {"status": 200, "ret": "OK"} + ) + + sync = await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual(message.body, body) + self.assertEqual(message.formatted_body, "
<h1>Hello</h1>
") + async def test_reconnect(self): """Check the reconnecting path.""" client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) @@ -56,6 +78,6 @@ class BotTest(unittest.IsolatedAsyncioTestCase): await client.logout(all_devices=True) await client.close() self.assertEqual( - bot_req({"text": "Re"}, KEY, room.room_id), + bot_req({"body": "Re"}, KEY, room.room_id), {"status": 200, "ret": "OK"}, ) From fa8f9b4a51eed409a892deeba10b13369d717e51 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 11:31:31 +0200 Subject: [PATCH 048/184] allow "key" to be passed as a parameter This was initially designed and implemented in #4 Co-authored-by: Sven Seeberg --- CHANGELOG.md | 3 ++- matrix_webhook/__main__.py | 6 +++++- tests/start.py | 11 ++++++++--- tests/tests.py | 10 +++++++++- 4 files changed, 24 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c55982..35d0863 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -- update "text" key to "body". +- allow "key" to be passed as a parameter +- rename "text" to "body". ## [3.1.0] - 2021-07-18 diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index 2977f72..17305cc 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -38,10 +38,14 @@ async def handler(request): except json.decoder.JSONDecodeError: return create_json_response(HTTPStatus.BAD_REQUEST, "Invalid JSON") - # legacy naming: + # legacy naming if "text" in data and "body" not in data: data["body"] = data["text"] + # allow key to be passed as a parameter + if "key" in request.rel_url.query and "key" not in data: + data["key"] = request.rel_url.query["key"] + if not all(key in data for key in ["body", "key"]): return create_json_response( HTTPStatus.BAD_REQUEST, "Missing body and/or API key property" diff --git a/tests/start.py b/tests/start.py index 3acb3b5..f235edd 100755 --- a/tests/start.py +++ b/tests/start.py @@ -25,12 +25,17 @@ parser.add_argument( ) -def bot_req(req=None, key=None, room_id=None): +def bot_req(req=None, key=None, room_id=None, key_as_param=False): """Bot requests boilerplate.""" + params = {} + if key is not None: - req["key"] = key + if key_as_param: + params["key"] = key + else: + req["key"] = key url = BOT_URL if room_id is None else f"{BOT_URL}/{room_id}" - return httpx.post(url, json=req).json() + return httpx.post(url, params=params, json=req).json() def wait_available(url: str, key: str, timeout: int = 10) -> bool: diff --git a/tests/tests.py b/tests/tests.py index 096dace..2bc97e3 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -18,13 +18,21 @@ class BotTest(unittest.IsolatedAsyncioTestCase): {"status": 400, "ret": "Missing body and/or API key property"}, ) self.assertEqual( - bot_req({"body": 3, "key": None}), {"status": 401, "ret": "Invalid API key"} + bot_req({"body": 3}, "wrong_key"), {"status": 401, "ret": "Invalid API key"} + ) + self.assertEqual( + bot_req({"body": 3}, "wrong_key", key_as_param=True), + {"status": 401, "ret": "Invalid API key"}, ) # TODO: if the client from matrix_webhook has olm support, this won't be a 403 from synapse, # but a LocalProtocolError from matrix_webhook self.assertEqual( bot_req({"body": 3}, KEY), {"status": 403, "ret": "Unknown room"} ) + self.assertEqual( + bot_req({"body": 3}, KEY, key_as_param=True), + {"status": 403, "ret": "Unknown room"}, + ) async def test_message(self): """Send a markdown message with the old format, and check the 
result.""" From 3bebc88ee21438838d43828c4b6c47c2bed96dfc Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 12:03:26 +0200 Subject: [PATCH 049/184] allow direct formatted_body MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This was initially designed and implemented in #6 Co-authored-by: Gerhard Bräunlich --- CHANGELOG.md | 2 ++ matrix_webhook/__main__.py | 7 ++++++- tests/tests.py | 26 ++++++++++++++++++++++++++ 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 35d0863..baff78f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +- add formatted_body to bypass markdown with direct + [matrix-custom-HTML](https://matrix.org/docs/spec/client_server/r0.6.1#m-room-message-msgtypes) - allow "key" to be passed as a parameter - rename "text" to "body". diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index 17305cc..005a3eb 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -54,12 +54,17 @@ async def handler(request): if data["key"] != conf.API_KEY: return create_json_response(HTTPStatus.UNAUTHORIZED, "Invalid API key") + if "formatted_body" in data: + formatted_body = data["formatted_body"] + else: + formatted_body = markdown(str(data["body"]), extensions=["extra"]) + room_id = request.path[1:] content = { "msgtype": "m.text", "body": data["body"], "format": "org.matrix.custom.html", - "formatted_body": markdown(str(data["body"]), extensions=["extra"]), + "formatted_body": formatted_body, } for _ in range(10): try: diff --git a/tests/tests.py b/tests/tests.py index 2bc97e3..9a82cc9 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -78,6 +78,32 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(message.body, body) self.assertEqual(message.formatted_body, "
<h1>Hello</h1>
") + async def test_formatted_body(self): + """Send a formatted message, and check the result.""" + body = "Formatted message" + formatted_body = "markdownFormatted message" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + self.assertEqual( + bot_req( + {"body": body, "formatted_body": formatted_body}, KEY, room.room_id + ), + {"status": 200, "ret": "OK"}, + ) + + sync = await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual(message.body, body) + self.assertEqual(message.formatted_body, formatted_body) + async def test_reconnect(self): """Check the reconnecting path.""" client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) From 2d232fe1f7b8f0839e9219e3eb4fff7823d61fe7 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 13:06:36 +0200 Subject: [PATCH 050/184] add grafana formatter This was initially designed and implemented in #4 Co-authored-by: Sven Seeberg --- CHANGELOG.md | 1 + matrix_webhook/__main__.py | 8 +++++++- matrix_webhook/formatters.py | 15 +++++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 matrix_webhook/formatters.py diff --git a/CHANGELOG.md b/CHANGELOG.md index baff78f..d7778fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +- add grafana formatter - add formatted_body to bypass markdown with direct [matrix-custom-HTML](https://matrix.org/docs/spec/client_server/r0.6.1#m-room-message-msgtypes) - allow "key" to be passed as a parameter diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index 005a3eb..c1ac638 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -16,7 +16,7 @@ from nio import AsyncClient from nio.exceptions import LocalProtocolError from nio.responses import RoomSendError -from . import conf +from . 
import conf, formatters ERROR_MAP = {"M_FORBIDDEN": HTTPStatus.FORBIDDEN} @@ -46,6 +46,12 @@ async def handler(request): if "key" in request.rel_url.query and "key" not in data: data["key"] = request.rel_url.query["key"] + if "formatter" in request.rel_url.query: + try: + data = getattr(formatters, request.rel_url.query["formatter"])(data) + except AttributeError: + return create_json_response(HTTPStatus.BAD_REQUEST, "Unknown formatter") + if not all(key in data for key in ["body", "key"]): return create_json_response( HTTPStatus.BAD_REQUEST, "Missing body and/or API key property" diff --git a/matrix_webhook/formatters.py b/matrix_webhook/formatters.py new file mode 100644 index 0000000..1e4600e --- /dev/null +++ b/matrix_webhook/formatters.py @@ -0,0 +1,15 @@ +"""Formatters for matrix webhook.""" + + +def grafana(data): + """Pretty-print a grafana notification.""" + text = "" + if "title" in data: + text = "### " + data["title"] + "\n" + if "message" in data: + text = text + data["message"] + "\n\n" + if "evalMatches" in data: + for match in data["evalMatches"]: + text = text + "* " + match["metric"] + ": " + str(match["value"]) + "\n" + data["body"] = text + return data From c07d4bfa8d506bce5fd9f5d1a85d18f35dc4aade Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 13:06:52 +0200 Subject: [PATCH 051/184] add tests for grafana formatter --- tests/start.py | 6 ++-- tests/test_grafana.py | 66 +++++++++++++++++++++++++++++++++++++++++++ tests/tests.py | 4 +++ 3 files changed, 73 insertions(+), 3 deletions(-) create mode 100644 tests/test_grafana.py diff --git a/tests/start.py b/tests/start.py index f235edd..ea76b56 100755 --- a/tests/start.py +++ b/tests/start.py @@ -25,10 +25,10 @@ parser.add_argument( ) -def bot_req(req=None, key=None, room_id=None, key_as_param=False): +def bot_req(req=None, key=None, room_id=None, params=None, key_as_param=False): """Bot requests boilerplate.""" - params = {} - + if params is None: + params = {} if key is not None: if key_as_param: params["key"] = key diff --git a/tests/test_grafana.py b/tests/test_grafana.py new file mode 100644 index 0000000..33c080c --- /dev/null +++ b/tests/test_grafana.py @@ -0,0 +1,66 @@ +"""Test module for grafana formatter.""" + +import unittest + +import httpx +import nio + +from .start import BOT_URL, FULL_ID, KEY, MATRIX_ID, MATRIX_PW, MATRIX_URL + +# ref https://grafana.com/docs/grafana/latest/alerting/old-alerting/notifications/#webhook +EXAMPLE_GRAFANA_REQUEST = """ +{ + "dashboardId":1, + "evalMatches":[ + { + "value":1, + "metric":"Count", + "tags":{} + } + ], + "imageUrl":"https://grafana.com/assets/img/blog/mixed_styles.png", + "message":"Notification Message", + "orgId":1, + "panelId":2, + "ruleId":1, + "ruleName":"Panel Title alert", + "ruleUrl":"http://localhost:3000/d/hZ7BuVbWz/test-dashboard?fullscreen\u0026edit\u0026tab=alert\u0026panelId=2\u0026orgId=1", + "state":"alerting", + "tags":{ + "tag name":"tag value" + }, + "title":"[Alerting] Panel Title alert" +} +""" + + +class GrafanaFormatterTest(unittest.IsolatedAsyncioTestCase): + """Grafana formatter test class.""" + + async def test_grafana_body(self): + """Send a markdown message, and check the result.""" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + self.assertEqual( + httpx.post( + f"{BOT_URL}/{room.room_id}", + params={"formatter": "grafana", "key": KEY}, + content=EXAMPLE_GRAFANA_REQUEST, + ).json(), + {"status": 200, "ret": "OK"}, + ) + + sync 
= await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual( + message.body, + "### [Alerting] Panel Title alert\nNotification Message\n\n* Count: 1\n", + ) diff --git a/tests/tests.py b/tests/tests.py index 9a82cc9..1b17f9e 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -24,6 +24,10 @@ class BotTest(unittest.IsolatedAsyncioTestCase): bot_req({"body": 3}, "wrong_key", key_as_param=True), {"status": 401, "ret": "Invalid API key"}, ) + self.assertEqual( + bot_req({"body": 3}, KEY, params={"formatter": "wrong_formatter"}), + {"status": 400, "ret": "Unknown formatter"}, + ) # TODO: if the client from matrix_webhook has olm support, this won't be a 403 from synapse, # but a LocalProtocolError from matrix_webhook self.assertEqual( From c8f6c9ec287408a7b1581b2b1bd79a928afaf5e2 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 13:11:54 +0200 Subject: [PATCH 052/184] README: document grafana usage --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index 9f2442a..404f051 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,11 @@ curl -d '{"body":"new contrib from toto: [44](http://radio.localhost/map/#44)", ``` (or localhost:4785 without docker) +### Grafana + +Add a webhook with an URL like: +`http://matrixwebhook.localhost/!DPrUlnwOhBEfYwsDLh:matrix.org?key=secret&formatter=grafana' + ## Test room [#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org?via=laas.fr&via=tetaneutral.net&via=aen.im) From 0ccec84eef55103ae614229884b9a0e3efc9574f Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 31 Jul 2021 15:16:07 +0200 Subject: [PATCH 053/184] room_id can come from url, content, or parameters --- CHANGELOG.md | 1 + matrix_webhook/__main__.py | 16 ++++++++--- tests/start.py | 13 +++++++-- tests/tests.py | 58 ++++++++++++++++++++++++++++++++++---- 4 files changed, 77 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d7778fc..2ec7b0f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - add formatted_body to bypass markdown with direct [matrix-custom-HTML](https://matrix.org/docs/spec/client_server/r0.6.1#m-room-message-msgtypes) - allow "key" to be passed as a parameter +- allow "room_id" to be passed as a parameter or with the data - rename "text" to "body". 
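Taken together, the unreleased entries above let the room, the API key and the formatter be chosen per request. Below is a minimal sketch of such a request, assuming the example room id, the `secret` API key and the default `localhost:4785` binding used in the README examples elsewhere in this series:

```python
# Minimal sketch: post a markdown message with the API key and room id passed
# as query parameters; the key, room id and port are the README placeholders.
import httpx

resp = httpx.post(
    "http://localhost:4785",
    params={"key": "secret", "room_id": "!DPrUlnwOhBEfYwsDLh:matrix.org"},
    json={"body": "# Hello"},
)
print(resp.json())  # on success the bot answers {"status": 200, "ret": "OK"}
```

Adding `formatter=grafana` to the query string would instead run the payload through the grafana formatter introduced above, as exercised in `tests/test_grafana.py`.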
## [3.1.0] - 2021-07-18 diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index c1ac638..4416ccb 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -52,9 +52,18 @@ async def handler(request): except AttributeError: return create_json_response(HTTPStatus.BAD_REQUEST, "Unknown formatter") - if not all(key in data for key in ["body", "key"]): + if "room_id" in request.rel_url.query and "room_id" not in data: + data["room_id"] = request.rel_url.query["room_id"] + if "room_id" not in data: + data["room_id"] = request.path.lstrip("/") + + missing = [] + for key in ["body", "key", "room_id"]: + if key not in data or not data[key]: + missing.append(key) + if missing: return create_json_response( - HTTPStatus.BAD_REQUEST, "Missing body and/or API key property" + HTTPStatus.BAD_REQUEST, f"Missing {', '.join(missing)}" ) if data["key"] != conf.API_KEY: @@ -65,7 +74,6 @@ async def handler(request): else: formatted_body = markdown(str(data["body"]), extensions=["extra"]) - room_id = request.path[1:] content = { "msgtype": "m.text", "body": data["body"], @@ -74,7 +82,7 @@ async def handler(request): } for _ in range(10): try: - resp = await send_room_message(room_id, content) + resp = await send_room_message(data["room_id"], content) if isinstance(resp, RoomSendError): if resp.status_code == "M_UNKNOWN_TOKEN": LOGGER.warning("Reconnecting") diff --git a/tests/start.py b/tests/start.py index ea76b56..c4160bd 100755 --- a/tests/start.py +++ b/tests/start.py @@ -25,7 +25,14 @@ parser.add_argument( ) -def bot_req(req=None, key=None, room_id=None, params=None, key_as_param=False): +def bot_req( + req=None, + key=None, + room_id=None, + params=None, + key_as_param=False, + room_as_parameter=False, +): """Bot requests boilerplate.""" if params is None: params = {} @@ -34,7 +41,9 @@ def bot_req(req=None, key=None, room_id=None, params=None, key_as_param=False): params["key"] = key else: req["key"] = key - url = BOT_URL if room_id is None else f"{BOT_URL}/{room_id}" + if room_as_parameter: + params["room_id"] = room_id + url = BOT_URL if room_id is None or room_as_parameter else f"{BOT_URL}/{room_id}" return httpx.post(url, params=params, json=req).json() diff --git a/tests/tests.py b/tests/tests.py index 1b17f9e..a997aea 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -15,13 +15,14 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(bot_req(), {"status": 400, "ret": "Invalid JSON"}) self.assertEqual( bot_req({"toto": 3}), - {"status": 400, "ret": "Missing body and/or API key property"}, + {"status": 400, "ret": "Missing body, key, room_id"}, ) self.assertEqual( - bot_req({"body": 3}, "wrong_key"), {"status": 401, "ret": "Invalid API key"} + bot_req({"body": 3}, "wrong_key", "wrong_room"), + {"status": 401, "ret": "Invalid API key"}, ) self.assertEqual( - bot_req({"body": 3}, "wrong_key", key_as_param=True), + bot_req({"body": 3}, "wrong_key", "wrong_room", key_as_param=True), {"status": 401, "ret": "Invalid API key"}, ) self.assertEqual( @@ -31,10 +32,11 @@ class BotTest(unittest.IsolatedAsyncioTestCase): # TODO: if the client from matrix_webhook has olm support, this won't be a 403 from synapse, # but a LocalProtocolError from matrix_webhook self.assertEqual( - bot_req({"body": 3}, KEY), {"status": 403, "ret": "Unknown room"} + bot_req({"body": 3}, KEY, "wrong_room"), + {"status": 403, "ret": "Unknown room"}, ) self.assertEqual( - bot_req({"body": 3}, KEY, key_as_param=True), + bot_req({"body": 3}, KEY, "wrong_room", key_as_param=True), 
{"status": 403, "ret": "Unknown room"}, ) @@ -60,6 +62,52 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(message.body, text) self.assertEqual(message.formatted_body, "
<h1>Hello</h1>
") + async def test_room_id_req(self): + """Send a markdown message in a room given as data, and check the result.""" + body = "# Hello" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + self.assertEqual( + bot_req({"body": body, "room_id": room.room_id}, KEY, room.room_id), + {"status": 200, "ret": "OK"}, + ) + + sync = await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual(message.body, body) + self.assertEqual(message.formatted_body, "
<h1>Hello</h1>
") + + async def test_room_id_parameter(self): + """Send a markdown message in a room given as parameter, and check the result.""" + body = "# Hello" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + self.assertEqual( + bot_req({"body": body}, KEY, room.room_id, room_as_parameter=True), + {"status": 200, "ret": "OK"}, + ) + + sync = await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual(message.body, body) + self.assertEqual(message.formatted_body, "

<h1>Hello</h1>
") + async def test_markdown_body(self): """Send a markdown message, and check the result.""" body = "# Hello" From 528940abccc3cd00164fdd0c67ec215c2a8fc7cd Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 7 Aug 2021 17:50:11 +0200 Subject: [PATCH 054/184] fix changelog --- CHANGELOG.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2ec7b0f..168d746 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,11 +13,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - allow "room_id" to be passed as a parameter or with the data - rename "text" to "body". +## [3.1.1] - 2021-07-18 + ## [3.1.0] - 2021-07-18 - Publish on PyPI & Docker Hub with Github Actions in [#10](https://github.com/nim65s/matrix-webhook/pull/10) - by [@nim65s](https://github.com/) + by [@nim65s](https://github.com/nim65s) ## [3.0.0] - 2021-07-18 @@ -43,6 +45,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [1.0.0] - 2020-02-14 - First release with matrix-client & http.server -[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...devel +[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.1.1...devel +[3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 +[3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 +[3.0.0]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...v3.0.0 [2.0.0]: https://github.com/nim65s/matrix-webhook/compare/v1.0.0...v2.0.0 [1.0.0]: https://github.com/nim65s/matrix-webhook/releases/tag/v1.0.0 From 530f40a1295f22c3ed430808c7d4dfd57ce2b95f Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 7 Aug 2021 17:50:49 +0200 Subject: [PATCH 055/184] setup action to publish releases on github --- .github/workflows/pypi.yml | 15 --------------- .github/workflows/release.yml | 19 +++++++++++++++++++ CHANGELOG.md | 1 + 3 files changed, 20 insertions(+), 15 deletions(-) delete mode 100644 .github/workflows/pypi.yml create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml deleted file mode 100644 index ee77ec4..0000000 --- a/.github/workflows/pypi.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: PyPI - -on: - push: - tags: - - 'v*' - -jobs: - pypi: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - run: pip install -U poetry twine wheel - - run: poetry build - - run: twine upload --non-interactive -u __token__ -p ${{ secrets.PYPI_TOKEN }} dist/* diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..9d52615 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,19 @@ +name: Release on GitHub & PyPI + +on: + push: + tags: + - 'v*' + +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - run: pip install -U poetry twine wheel + - run: poetry build + - run: twine upload --non-interactive -u __token__ -p ${{ secrets.PYPI_TOKEN }} dist/* + - run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + - run: gh release create -t "Release ${{ env.TAG}}" -n "$(awk '/## \[${{ env.TAG }}] - /{flag=1;next}/## \[/{flag=0}flag' CHANGELOG.md)" ${{ env.TAG }} dist/* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 168d746..66a1d03 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - allow "key" to be passed as a 
parameter - allow "room_id" to be passed as a parameter or with the data - rename "text" to "body". +- Publish releases also on github from github actions ## [3.1.1] - 2021-07-18 From 6f7d38dbd70b8034fe477786bd17db0b391753e0 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 17:44:40 +0200 Subject: [PATCH 056/184] update test for latest synapse docker image --- CHANGELOG.md | 1 + tests/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 66a1d03..cf6fbe8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - allow "room_id" to be passed as a parameter or with the data - rename "text" to "body". - Publish releases also on github from github actions +- fixed tests for recent synapse docker image ## [3.1.1] - 2021-07-18 diff --git a/tests/Dockerfile b/tests/Dockerfile index 42b60b9..09c8b05 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -4,7 +4,7 @@ FROM matrixdotorg/synapse # The config dir defaults to /data which is a volume made to keep data. # Here, we want to trash those (and avoid the permission issues) by using something else -ENV SYNAPSE_CONFIG_DIR=/srv SYNAPSE_SERVER_NAME=tests SYNAPSE_REPORT_STATS=no +ENV SYNAPSE_CONFIG_DIR=/srv SYNAPSE_DATA_DIR=/srv SYNAPSE_SERVER_NAME=tests SYNAPSE_REPORT_STATS=no # Generate configuration and keys for synapse WORKDIR $SYNAPSE_CONFIG_DIR From eabb446d05d119df345c45489e1238b943a09675 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 17:49:11 +0200 Subject: [PATCH 057/184] ci: release also on github --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 404f051..55d3ace 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![Tests](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/test.yml) [![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml) [![Docker-Hub](https://github.com/nim65s/matrix-webhook/actions/workflows/docker-hub.yml/badge.svg)](https://hub.docker.com/r/nim65s/matrix-webhook) -[![PyPI](https://github.com/nim65s/matrix-webhook/actions/workflows/pypi.yml/badge.svg)](https://pypi.org/project/matrix-webhook/) +[![Release](https://github.com/nim65s/matrix-webhook/actions/workflows/release.yml/badge.svg)](https://pypi.org/project/matrix-webhook/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) From c03ae0a571287f4f6a129993c6589efc472c731a Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 17:58:34 +0200 Subject: [PATCH 058/184] lint: add flake8 configuration Black allows up to 88 characters per line. Put this configuration into a separated file, as pyproject.toml won't do ref. 
https://github.com/PyCQA/flake8/issues/234 --- .flake8 | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .flake8 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..2bcd70e --- /dev/null +++ b/.flake8 @@ -0,0 +1,2 @@ +[flake8] +max-line-length = 88 From eb3c795368da825ee74254b936c2dace81287276 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 18:04:34 +0200 Subject: [PATCH 059/184] lint: fix line length --- matrix_webhook/conf.py | 3 ++- tests/start.py | 3 ++- tests/tests.py | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/matrix_webhook/conf.py b/matrix_webhook/conf.py index fd39e02..bb93335 100644 --- a/matrix_webhook/conf.py +++ b/matrix_webhook/conf.py @@ -20,7 +20,8 @@ parser.add_argument( "-u", "--matrix-url", default=os.environ.get("MATRIX_URL", "https://matrix.org"), - help="matrix homeserver url. Default: `https://matrix.org`. Environment variable: `MATRIX_URL`", + help="matrix homeserver url. Default: `https://matrix.org`. " + "Environment variable: `MATRIX_URL`", ) parser.add_argument( "-i", diff --git a/tests/start.py b/tests/start.py index c4160bd..89aa8ec 100755 --- a/tests/start.py +++ b/tests/start.py @@ -51,7 +51,8 @@ def wait_available(url: str, key: str, timeout: int = 10) -> bool: """Wait until a service answer correctly or timeout.""" def check_json(url: str, key: str) -> bool: - """Ensure a service at a given url answers with valid json containing a certain key.""" + """Ensure a service at a given url answers + with valid json containing a certain key.""" try: data = httpx.get(url).json() return key in data diff --git a/tests/tests.py b/tests/tests.py index a997aea..01993c3 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -29,8 +29,8 @@ class BotTest(unittest.IsolatedAsyncioTestCase): bot_req({"body": 3}, KEY, params={"formatter": "wrong_formatter"}), {"status": 400, "ret": "Unknown formatter"}, ) - # TODO: if the client from matrix_webhook has olm support, this won't be a 403 from synapse, - # but a LocalProtocolError from matrix_webhook + # TODO: if the client from matrix_webhook has olm support, + # this won't be a 403 from synapse, but a LocalProtocolError from matrix_webhook self.assertEqual( bot_req({"body": 3}, KEY, "wrong_room"), {"status": 403, "ret": "Unknown room"}, From 6aaac9149d510d1d7bec7a040631f12dcdfea5bf Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 18:06:35 +0200 Subject: [PATCH 060/184] lint: pre-commit autoupdate --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6fc3f8b..c0214fd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 21.6b0 + rev: 21.7b0 hooks: - id: black language_version: python3 From 2b7b79971dbbd81e9a070f7b284b305c2a141f38 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 18:12:11 +0200 Subject: [PATCH 061/184] lint: fix pydocstyle --- .pre-commit-config.yaml | 2 ++ pyproject.toml | 2 +- tests/start.py | 5 +++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c0214fd..6e8ad61 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,6 +22,8 @@ repos: rev: 6.1.1 hooks: - id: pydocstyle + args: + - --ignore=D200,D212 - repo: https://github.com/PyCQA/flake8 rev: 3.9.2 hooks: diff --git a/pyproject.toml 
b/pyproject.toml index ab1b583..01252ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ pydocstyle = "^6.1.1" flake8 = "^3.9.2" [tool.pydocstyle] -ignore = ["D203", "D204", "D212"] +ignore = ["D200", "D203", "D204", "D212"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/start.py b/tests/start.py index 89aa8ec..3cc66eb 100755 --- a/tests/start.py +++ b/tests/start.py @@ -51,8 +51,9 @@ def wait_available(url: str, key: str, timeout: int = 10) -> bool: """Wait until a service answer correctly or timeout.""" def check_json(url: str, key: str) -> bool: - """Ensure a service at a given url answers - with valid json containing a certain key.""" + """ + Ensure a service at a given url answers with valid json including a certain key. + """ try: data = httpx.get(url).json() return key in data From 7f20fb7ff984f795217e910d87ac6cad98bb3ade Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 8 Aug 2021 08:41:37 +0200 Subject: [PATCH 062/184] split code in utils / handler / app --- matrix_webhook/__main__.py | 167 +------------------------------------ matrix_webhook/app.py | 56 +++++++++++++ matrix_webhook/handler.py | 72 ++++++++++++++++ matrix_webhook/utils.py | 47 +++++++++++ 4 files changed, 178 insertions(+), 164 deletions(-) create mode 100644 matrix_webhook/app.py create mode 100644 matrix_webhook/handler.py create mode 100644 matrix_webhook/utils.py diff --git a/matrix_webhook/__main__.py b/matrix_webhook/__main__.py index 4416ccb..da99dcc 100644 --- a/matrix_webhook/__main__.py +++ b/matrix_webhook/__main__.py @@ -1,170 +1,9 @@ -""" -Matrix Webhook. - -Post a message to a matrix room with a simple HTTP POST -""" - -import asyncio -import json +"""Matrix Webhook module entrypoint.""" import logging -from http import HTTPStatus -from signal import SIGINT, SIGTERM - -from aiohttp import web -from markdown import markdown -from nio import AsyncClient -from nio.exceptions import LocalProtocolError -from nio.responses import RoomSendError - -from . import conf, formatters - -ERROR_MAP = {"M_FORBIDDEN": HTTPStatus.FORBIDDEN} - -CLIENT = AsyncClient(conf.MATRIX_URL, conf.MATRIX_ID) -LOGGER = logging.getLogger("matrix-webhook") - - -async def handler(request): - """ - Coroutine given to the server, st. it knows what to do with an HTTP request. - - This one handles a POST, checks its content, and forwards it to the matrix room. 
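The room targeting introduced earlier in this series works three ways: the room can come from the URL path, from a `room_id` query parameter, or from the JSON payload, and the JSON field takes precedence over the query parameter, which in turn takes precedence over the path. A minimal client-side sketch with httpx; the port is the project default without docker, while the key and room values are placeholders to replace:

```python
"""Minimal sketch of the three ways to target a room; key and room are placeholders."""
import httpx

BOT_URL = "http://localhost:4785"    # default port when running without docker
API_KEY = "secret"                   # placeholder, must match the server's API_KEY
ROOM_ID = "!someroomid:example.org"  # placeholder room

# 1. room_id taken from the URL path
r1 = httpx.post(f"{BOT_URL}/{ROOM_ID}", json={"body": "hello", "key": API_KEY})

# 2. room_id passed as a query parameter
r2 = httpx.post(
    BOT_URL, params={"room_id": ROOM_ID}, json={"body": "hello", "key": API_KEY}
)

# 3. room_id carried in the JSON data (wins over the query parameter and the path)
r3 = httpx.post(BOT_URL, json={"body": "hello", "key": API_KEY, "room_id": ROOM_ID})

for r in (r1, r2, r3):
    print(r.json())  # {"status": 200, "ret": "OK"} on success
```

With a real key and room, all three calls are equivalent.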
- """ - LOGGER.debug(f"Handling {request=}") - data = await request.read() - - try: - data = json.loads(data.decode()) - except json.decoder.JSONDecodeError: - return create_json_response(HTTPStatus.BAD_REQUEST, "Invalid JSON") - - # legacy naming - if "text" in data and "body" not in data: - data["body"] = data["text"] - - # allow key to be passed as a parameter - if "key" in request.rel_url.query and "key" not in data: - data["key"] = request.rel_url.query["key"] - - if "formatter" in request.rel_url.query: - try: - data = getattr(formatters, request.rel_url.query["formatter"])(data) - except AttributeError: - return create_json_response(HTTPStatus.BAD_REQUEST, "Unknown formatter") - - if "room_id" in request.rel_url.query and "room_id" not in data: - data["room_id"] = request.rel_url.query["room_id"] - if "room_id" not in data: - data["room_id"] = request.path.lstrip("/") - - missing = [] - for key in ["body", "key", "room_id"]: - if key not in data or not data[key]: - missing.append(key) - if missing: - return create_json_response( - HTTPStatus.BAD_REQUEST, f"Missing {', '.join(missing)}" - ) - - if data["key"] != conf.API_KEY: - return create_json_response(HTTPStatus.UNAUTHORIZED, "Invalid API key") - - if "formatted_body" in data: - formatted_body = data["formatted_body"] - else: - formatted_body = markdown(str(data["body"]), extensions=["extra"]) - - content = { - "msgtype": "m.text", - "body": data["body"], - "format": "org.matrix.custom.html", - "formatted_body": formatted_body, - } - for _ in range(10): - try: - resp = await send_room_message(data["room_id"], content) - if isinstance(resp, RoomSendError): - if resp.status_code == "M_UNKNOWN_TOKEN": - LOGGER.warning("Reconnecting") - await CLIENT.login(conf.MATRIX_PW) - else: - return create_json_response( - ERROR_MAP[resp.status_code], resp.message - ) - else: - break - except LocalProtocolError as e: - LOGGER.error(f"Send error: {e}") - LOGGER.warning("Trying again") - else: - return create_json_response( - HTTPStatus.GATEWAY_TIMEOUT, "Homeserver not responding" - ) - - return create_json_response(HTTPStatus.OK, "OK") - - -def create_json_response(status, ret): - """Create a JSON response.""" - LOGGER.debug(f"Creating json response: {status=}, {ret=}") - response_data = {"status": status, "ret": ret} - return web.json_response(response_data, status=status) - - -async def send_room_message(room_id, content): - """Send a message to a room.""" - LOGGER.debug(f"Sending room message in {room_id=}: {content=}") - return await CLIENT.room_send( - room_id=room_id, message_type="m.room.message", content=content - ) - - -async def main(event): - """ - Launch main coroutine. 
- - matrix client login & start web server - """ - LOGGER.info(f"Log in {conf.MATRIX_ID=} on {conf.MATRIX_URL=}") - await CLIENT.login(conf.MATRIX_PW) - - server = web.Server(handler) - runner = web.ServerRunner(server) - await runner.setup() - LOGGER.info(f"Binding on {conf.SERVER_ADDRESS=}") - site = web.TCPSite(runner, *conf.SERVER_ADDRESS) - await site.start() - - # Run until we get a shutdown request - await event.wait() - - # Cleanup - await runner.cleanup() - await CLIENT.close() - - -def terminate(event, signal): - """Close handling stuff.""" - event.set() - asyncio.get_event_loop().remove_signal_handler(signal) - - -def run(): - """Launch everything.""" - LOGGER.info("Starting...") - loop = asyncio.get_event_loop() - event = asyncio.Event() - - for sig in (SIGINT, SIGTERM): - loop.add_signal_handler(sig, terminate, event, sig) - - loop.run_until_complete(main(event)) - - LOGGER.info("Closing...") - loop.close() +from . import app, conf if __name__ == "__main__": log_format = "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s" logging.basicConfig(level=50 - 10 * conf.VERBOSE, format=log_format) - run() + app.run() diff --git a/matrix_webhook/app.py b/matrix_webhook/app.py new file mode 100644 index 0000000..dec9ea1 --- /dev/null +++ b/matrix_webhook/app.py @@ -0,0 +1,56 @@ +"""Matrix Webhook app.""" + +import asyncio +import logging +from signal import SIGINT, SIGTERM + +from aiohttp import web + +from . import conf, handler, utils + +LOGGER = logging.getLogger("matrix_webhook.app") + + +async def main(event): + """ + Launch main coroutine. + + matrix client login & start web server + """ + LOGGER.info(f"Log in {conf.MATRIX_ID=} on {conf.MATRIX_URL=}") + await utils.CLIENT.login(conf.MATRIX_PW) + + server = web.Server(handler.matrix_webhook) + runner = web.ServerRunner(server) + await runner.setup() + LOGGER.info(f"Binding on {conf.SERVER_ADDRESS=}") + site = web.TCPSite(runner, *conf.SERVER_ADDRESS) + await site.start() + + # Run until we get a shutdown request + await event.wait() + + # Cleanup + await runner.cleanup() + await utils.CLIENT.close() + + +def terminate(event, signal): + """Close handling stuff.""" + event.set() + asyncio.get_event_loop().remove_signal_handler(signal) + + +def run(): + """Launch everything.""" + LOGGER.info("Starting...") + loop = asyncio.get_event_loop() + event = asyncio.Event() + + for sig in (SIGINT, SIGTERM): + loop.add_signal_handler(sig, terminate, event, sig) + + loop.run_until_complete(main(event)) + + LOGGER.info("Closing...") + loop.close() diff --git a/matrix_webhook/handler.py b/matrix_webhook/handler.py new file mode 100644 index 0000000..6ba9c59 --- /dev/null +++ b/matrix_webhook/handler.py @@ -0,0 +1,72 @@ +"""Matrix Webhook main request handler.""" + +import json +import logging +from http import HTTPStatus + +from markdown import markdown + +from . import conf, formatters, utils + +LOGGER = logging.getLogger("matrix_webhook.handler") + + +async def matrix_webhook(request): + """ + Coroutine given to the server, st. it knows what to do with an HTTP request. + + This one handles a POST, checks its content, and forwards it to the matrix room. 
+ """ + LOGGER.debug(f"Handling {request=}") + data = await request.read() + + try: + data = json.loads(data.decode()) + except json.decoder.JSONDecodeError: + return utils.create_json_response(HTTPStatus.BAD_REQUEST, "Invalid JSON") + + # legacy naming + if "text" in data and "body" not in data: + data["body"] = data["text"] + + # allow key to be passed as a parameter + if "key" in request.rel_url.query and "key" not in data: + data["key"] = request.rel_url.query["key"] + + if "formatter" in request.rel_url.query: + try: + data = getattr(formatters, request.rel_url.query["formatter"])(data) + except AttributeError: + return utils.create_json_response( + HTTPStatus.BAD_REQUEST, "Unknown formatter" + ) + + if "room_id" in request.rel_url.query and "room_id" not in data: + data["room_id"] = request.rel_url.query["room_id"] + if "room_id" not in data: + data["room_id"] = request.path.lstrip("/") + + missing = [] + for key in ["body", "key", "room_id"]: + if key not in data or not data[key]: + missing.append(key) + if missing: + return utils.create_json_response( + HTTPStatus.BAD_REQUEST, f"Missing {', '.join(missing)}" + ) + + if data["key"] != conf.API_KEY: + return utils.create_json_response(HTTPStatus.UNAUTHORIZED, "Invalid API key") + + if "formatted_body" in data: + formatted_body = data["formatted_body"] + else: + formatted_body = markdown(str(data["body"]), extensions=["extra"]) + + content = { + "msgtype": "m.text", + "body": data["body"], + "format": "org.matrix.custom.html", + "formatted_body": formatted_body, + } + return await utils.send_room_message(data["room_id"], content) diff --git a/matrix_webhook/utils.py b/matrix_webhook/utils.py new file mode 100644 index 0000000..2b4d74d --- /dev/null +++ b/matrix_webhook/utils.py @@ -0,0 +1,47 @@ +"""Matrix Webhook utils.""" + +import logging +from http import HTTPStatus + +from aiohttp import web +from nio import AsyncClient +from nio.exceptions import LocalProtocolError +from nio.responses import RoomSendError + +from . 
import conf + +ERROR_MAP = {"M_FORBIDDEN": HTTPStatus.FORBIDDEN} +LOGGER = logging.getLogger("matrix_webhook.utils") +CLIENT = AsyncClient(conf.MATRIX_URL, conf.MATRIX_ID) + + +def create_json_response(status, ret): + """Create a JSON response.""" + LOGGER.debug(f"Creating json response: {status=}, {ret=}") + response_data = {"status": status, "ret": ret} + return web.json_response(response_data, status=status) + + +async def send_room_message(room_id, content): + """Send a message to a room.""" + LOGGER.debug(f"Sending room message in {room_id=}: {content=}") + + for _ in range(10): + try: + resp = await CLIENT.room_send( + room_id=room_id, message_type="m.room.message", content=content + ) + if isinstance(resp, RoomSendError): + if resp.status_code == "M_UNKNOWN_TOKEN": + LOGGER.warning("Reconnecting") + await CLIENT.login(conf.MATRIX_PW) + else: + return create_json_response( + ERROR_MAP[resp.status_code], resp.message + ) + else: + return create_json_response(HTTPStatus.OK, "OK") + except LocalProtocolError as e: + LOGGER.error(f"Send error: {e}") + LOGGER.warning("Trying again") + return create_json_response(HTTPStatus.GATEWAY_TIMEOUT, "Homeserver not responding") From 8f215c04fdb18aa9b386156e3e0938ab216323bb Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 8 Aug 2021 09:50:11 +0200 Subject: [PATCH 063/184] docs: release --- CHANGELOG.md | 2 +- docs/release.md | 18 ++++++++++++++++++ docs/release.sh | 20 ++++++++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 docs/release.md create mode 100755 docs/release.sh diff --git a/CHANGELOG.md b/CHANGELOG.md index cf6fbe8..6a24cfc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,7 +47,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [1.0.0] - 2020-02-14 - First release with matrix-client & http.server -[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.1.1...devel +[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.1.1...master [3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 [3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 [3.0.0]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...v3.0.0 diff --git a/docs/release.md b/docs/release.md new file mode 100644 index 0000000..ba59891 --- /dev/null +++ b/docs/release.md @@ -0,0 +1,18 @@ +# Publish a new release + +A github actions handle the build of the release archives, and push them to PyPI and Github Releases. +To trigger it, we just need to: + +1. use poetry to update the version number +2. update the changelog +3. git commit +4. git tag +5. git push +6. git push --tags + + +For this, an helper script is provided: + +```bash +./docs/release.sh [patch|minor|major|x.y.z] +``` diff --git a/docs/release.sh b/docs/release.sh new file mode 100755 index 0000000..c1cb632 --- /dev/null +++ b/docs/release.sh @@ -0,0 +1,20 @@ +#!/bin/bash -eux +# ./docs/release.sh [patch|minor|major|x.y.z] + +[[ $(basename $PWD) == docs ]] && cd .. 
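The handler above only runs `body` through markdown when the payload does not already carry a `formatted_body`; when it does, that HTML is forwarded to the room as-is. A minimal sketch of this bypass, with placeholder endpoint, key and room:

```python
"""Minimal sketch of the formatted_body bypass; endpoint, key and room are placeholders."""
import httpx

resp = httpx.post(
    "http://localhost:4785/!someroomid:example.org",  # placeholder room in the path
    json={
        "key": "secret",                # placeholder API key
        "body": "plain-text fallback",  # still required, used as the unformatted body
        # With formatted_body present, the handler skips markdown()
        # and forwards this HTML to the room unchanged.
        "formatted_body": "<b>already rendered</b> HTML",
    },
)
print(resp.json())  # {"status": 200, "ret": "OK"} on success
```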
+ + +OLD=$(poetry version -s) + +poetry version $1 + +NEW=$(poetry version -s) +DATE=$(date +%Y-%m-%d) + +sed -i "/^## \[Unreleased\]/a \\\n## [$NEW] - $DATE" CHANGELOG.md +sed -i "/^\[Unreleased\]/s/$OLD/$NEW/" CHANGELOG.md +sed -i "/^\[Unreleased\]/a [$NEW] https://github.com/nim65s/matrix-webhook/compare/v$OLD...v$NEW" CHANGELOG.md + +echo git add pyproject.toml CHANGELOG.md +echo git commit -m "Release v$NEW" +echo git tag -s "v$NEW" -m "Release v$NEW" From 9a544b8f2bc8200844d359643a1bf125bc341eb5 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sun, 8 Aug 2021 10:01:59 +0200 Subject: [PATCH 064/184] detail --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 55d3ace..c4c31d1 100644 --- a/README.md +++ b/README.md @@ -81,8 +81,7 @@ curl -d '{"body":"new contrib from toto: [44](http://radio.localhost/map/#44)", ### Grafana -Add a webhook with an URL like: -`http://matrixwebhook.localhost/!DPrUlnwOhBEfYwsDLh:matrix.org?key=secret&formatter=grafana' +Add a webhook with an URL like ending with `?formatter=grafana&key=API_KEY' ## Test room From ac7d1d9647008145e9d0cf65d24744d0db4862b8 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 18:39:28 +0200 Subject: [PATCH 065/184] typo --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a24cfc..f75b3db 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - allow "room_id" to be passed as a parameter or with the data - rename "text" to "body". - Publish releases also on github from github actions -- fixed tests for recent synapse docker image +- fix tests for recent synapse docker image ## [3.1.1] - 2021-07-18 From 4bcdb25c809391baaabc264d9309059f9f48ead2 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 20:05:08 +0200 Subject: [PATCH 066/184] formatters: also get headers --- README.md | 8 ++++++-- matrix_webhook/formatters.py | 2 +- matrix_webhook/handler.py | 4 +++- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index c4c31d1..62f7dbd 100644 --- a/README.md +++ b/README.md @@ -79,9 +79,13 @@ curl -d '{"body":"new contrib from toto: [44](http://radio.localhost/map/#44)", ``` (or localhost:4785 without docker) -### Grafana +### For Github -Add a webhook with an URL like ending with `?formatter=grafana&key=API_KEY' +Add a JSON webhook with `?formatter=github`, and put the `API_KEY` as secret + +### For Grafana + +Add a webhook with an URL ending with `?formatter=grafana&key=API_KEY' ## Test room diff --git a/matrix_webhook/formatters.py b/matrix_webhook/formatters.py index 1e4600e..bec0979 100644 --- a/matrix_webhook/formatters.py +++ b/matrix_webhook/formatters.py @@ -1,7 +1,7 @@ """Formatters for matrix webhook.""" -def grafana(data): +def grafana(data, headers): """Pretty-print a grafana notification.""" text = "" if "title" in data: diff --git a/matrix_webhook/handler.py b/matrix_webhook/handler.py index 6ba9c59..ee48710 100644 --- a/matrix_webhook/handler.py +++ b/matrix_webhook/handler.py @@ -35,7 +35,9 @@ async def matrix_webhook(request): if "formatter" in request.rel_url.query: try: - data = getattr(formatters, request.rel_url.query["formatter"])(data) + data = getattr(formatters, request.rel_url.query["formatter"])( + data, request.headers + ) except AttributeError: return utils.create_json_response( HTTPStatus.BAD_REQUEST, "Unknown formatter" From 
6b5d6e6e8745e1e17fbbbd2e93039fedbf874ba2 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 23:47:07 +0200 Subject: [PATCH 067/184] formatters: add github --- .pre-commit-config.yaml | 2 +- CHANGELOG.md | 3 +- matrix_webhook/formatters.py | 17 ++++++++ matrix_webhook/handler.py | 15 ++++++- tests/example_github_push.json | 1 + tests/example_grafana.json | 22 +++++++++++ tests/test_github.py | 71 ++++++++++++++++++++++++++++++++++ tests/test_grafana.py | 36 ++++------------- tests/tests.py | 2 +- 9 files changed, 136 insertions(+), 33 deletions(-) create mode 100644 tests/example_github_push.json create mode 100644 tests/example_grafana.json create mode 100644 tests/test_github.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6e8ad61..e09667c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,7 @@ repos: hooks: - id: pydocstyle args: - - --ignore=D200,D212 + - --ignore=D200,D203,D212 - repo: https://github.com/PyCQA/flake8 rev: 3.9.2 hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index f75b3db..ede170d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,10 +6,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -- add grafana formatter +- add github & grafana formatters - add formatted_body to bypass markdown with direct [matrix-custom-HTML](https://matrix.org/docs/spec/client_server/r0.6.1#m-room-message-msgtypes) - allow "key" to be passed as a parameter +- allow to use a sha256 HMAC hex digest with the key instead of the raw key - allow "room_id" to be passed as a parameter or with the data - rename "text" to "body". - Publish releases also on github from github actions diff --git a/matrix_webhook/formatters.py b/matrix_webhook/formatters.py index bec0979..55ba43d 100644 --- a/matrix_webhook/formatters.py +++ b/matrix_webhook/formatters.py @@ -13,3 +13,20 @@ def grafana(data, headers): text = text + "* " + match["metric"] + ": " + str(match["value"]) + "\n" data["body"] = text return data + + +def github(data, headers): + """Pretty-print a github notification.""" + # TODO: Write nice useful formatters. This is only an example. + if headers["X-GitHub-Event"] == "push": + pusher, ref, a, b, c = [ + data[k] for k in ["pusher", "ref", "after", "before", "compare"] + ] + pusher = f"[{pusher['name']}](https://github.com/{pusher['name']})" + data["body"] = f"@{pusher} pushed on {ref}: [{b} → {a}]({c}):\n\n" + for commit in data["commits"]: + data["body"] += f"- [{commit['message']}]({commit['url']})\n" + else: + data["body"] = "notification from github" + data["digest"] = headers["X-Hub-Signature-256"].replace("sha256=", "") + return data diff --git a/matrix_webhook/handler.py b/matrix_webhook/handler.py index ee48710..1f1ac5f 100644 --- a/matrix_webhook/handler.py +++ b/matrix_webhook/handler.py @@ -3,6 +3,7 @@ import json import logging from http import HTTPStatus +from hmac import HMAC from markdown import markdown @@ -18,10 +19,10 @@ async def matrix_webhook(request): This one handles a POST, checks its content, and forwards it to the matrix room. 
""" LOGGER.debug(f"Handling {request=}") - data = await request.read() + data_b = await request.read() try: - data = json.loads(data.decode()) + data = json.loads(data_b.decode()) except json.decoder.JSONDecodeError: return utils.create_json_response(HTTPStatus.BAD_REQUEST, "Invalid JSON") @@ -48,6 +49,16 @@ async def matrix_webhook(request): if "room_id" not in data: data["room_id"] = request.path.lstrip("/") + # If we get a good SHA-256 HMAC digest, + # we can consider that the sender has the right API key + if "digest" in data: + if data["digest"] == HMAC(conf.API_KEY.encode(), data_b, "sha256").hexdigest(): + data["key"] = conf.API_KEY + else: # but if there is a wrong digest, an informative error should be provided + return utils.create_json_response( + HTTPStatus.UNAUTHORIZED, "Invalid SHA-256 HMAC digest" + ) + missing = [] for key in ["body", "key", "room_id"]: if key not in data or not data[key]: diff --git a/tests/example_github_push.json b/tests/example_github_push.json new file mode 100644 index 0000000..6be68fa --- /dev/null +++ b/tests/example_github_push.json @@ -0,0 +1 @@ +{"ref":"refs/heads/devel","before":"ac7d1d9647008145e9d0cf65d24744d0db4862b8","after":"4bcdb25c809391baaabc264d9309059f9f48ead2","repository":{"id":171114171,"node_id":"MDEwOlJlcG9zaXRvcnkxNzExMTQxNzE=","name":"matrix-webhook","full_name":"nim65s/matrix-webhook","private":false,"owner":{"name":"nim65s","email":"guilhem.saurel@laas.fr","login":"nim65s","id":131929,"node_id":"MDQ6VXNlcjEzMTkyOQ==","avatar_url":"https://avatars.githubusercontent.com/u/131929?v=4","gravatar_id":"","url":"https://api.github.com/users/nim65s","html_url":"https://github.com/nim65s","followers_url":"https://api.github.com/users/nim65s/followers","following_url":"https://api.github.com/users/nim65s/following{/other_user}","gists_url":"https://api.github.com/users/nim65s/gists{/gist_id}","starred_url":"https://api.github.com/users/nim65s/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/nim65s/subscriptions","organizations_url":"https://api.github.com/users/nim65s/orgs","repos_url":"https://api.github.com/users/nim65s/repos","events_url":"https://api.github.com/users/nim65s/events{/privacy}","received_events_url":"https://api.github.com/users/nim65s/received_events","type":"User","site_admin":false},"html_url":"https://github.com/nim65s/matrix-webhook","description":"Post a message to a matrix room with a simple HTTP 
POST","fork":false,"url":"https://github.com/nim65s/matrix-webhook","forks_url":"https://api.github.com/repos/nim65s/matrix-webhook/forks","keys_url":"https://api.github.com/repos/nim65s/matrix-webhook/keys{/key_id}","collaborators_url":"https://api.github.com/repos/nim65s/matrix-webhook/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/nim65s/matrix-webhook/teams","hooks_url":"https://api.github.com/repos/nim65s/matrix-webhook/hooks","issue_events_url":"https://api.github.com/repos/nim65s/matrix-webhook/issues/events{/number}","events_url":"https://api.github.com/repos/nim65s/matrix-webhook/events","assignees_url":"https://api.github.com/repos/nim65s/matrix-webhook/assignees{/user}","branches_url":"https://api.github.com/repos/nim65s/matrix-webhook/branches{/branch}","tags_url":"https://api.github.com/repos/nim65s/matrix-webhook/tags","blobs_url":"https://api.github.com/repos/nim65s/matrix-webhook/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/nim65s/matrix-webhook/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/nim65s/matrix-webhook/git/refs{/sha}","trees_url":"https://api.github.com/repos/nim65s/matrix-webhook/git/trees{/sha}","statuses_url":"https://api.github.com/repos/nim65s/matrix-webhook/statuses/{sha}","languages_url":"https://api.github.com/repos/nim65s/matrix-webhook/languages","stargazers_url":"https://api.github.com/repos/nim65s/matrix-webhook/stargazers","contributors_url":"https://api.github.com/repos/nim65s/matrix-webhook/contributors","subscribers_url":"https://api.github.com/repos/nim65s/matrix-webhook/subscribers","subscription_url":"https://api.github.com/repos/nim65s/matrix-webhook/subscription","commits_url":"https://api.github.com/repos/nim65s/matrix-webhook/commits{/sha}","git_commits_url":"https://api.github.com/repos/nim65s/matrix-webhook/git/commits{/sha}","comments_url":"https://api.github.com/repos/nim65s/matrix-webhook/comments{/number}","issue_comment_url":"https://api.github.com/repos/nim65s/matrix-webhook/issues/comments{/number}","contents_url":"https://api.github.com/repos/nim65s/matrix-webhook/contents/{+path}","compare_url":"https://api.github.com/repos/nim65s/matrix-webhook/compare/{base}...{head}","merges_url":"https://api.github.com/repos/nim65s/matrix-webhook/merges","archive_url":"https://api.github.com/repos/nim65s/matrix-webhook/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/nim65s/matrix-webhook/downloads","issues_url":"https://api.github.com/repos/nim65s/matrix-webhook/issues{/number}","pulls_url":"https://api.github.com/repos/nim65s/matrix-webhook/pulls{/number}","milestones_url":"https://api.github.com/repos/nim65s/matrix-webhook/milestones{/number}","notifications_url":"https://api.github.com/repos/nim65s/matrix-webhook/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/nim65s/matrix-webhook/labels{/name}","releases_url":"https://api.github.com/repos/nim65s/matrix-webhook/releases{/id}","deployments_url":"https://api.github.com/repos/nim65s/matrix-webhook/deployments","created_at":1550402971,"updated_at":"2021-07-20T22:30:52Z","pushed_at":1630087539,"git_url":"git://github.com/nim65s/matrix-webhook.git","ssh_url":"git@github.com:nim65s/matrix-webhook.git","clone_url":"https://github.com/nim65s/matrix-webhook.git","svn_url":"https://github.com/nim65s/matrix-webhook","homepage":"https://code.ffdn.org/tetaneutral.net/matrix-webhook","size":158,"stargazers_count":17,"watchers_count":17,"language":"Python","has_issues":true,"has_projects":true
,"has_downloads":true,"has_wiki":true,"has_pages":false,"forks_count":7,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":2,"license":{"key":"other","name":"Other","spdx_id":"NOASSERTION","url":null,"node_id":"MDc6TGljZW5zZTA="},"forks":7,"open_issues":2,"watchers":17,"default_branch":"master","stargazers":17,"master_branch":"master"},"pusher":{"name":"nim65s","email":"guilhem.saurel@laas.fr"},"sender":{"login":"nim65s","id":131929,"node_id":"MDQ6VXNlcjEzMTkyOQ==","avatar_url":"https://avatars.githubusercontent.com/u/131929?v=4","gravatar_id":"","url":"https://api.github.com/users/nim65s","html_url":"https://github.com/nim65s","followers_url":"https://api.github.com/users/nim65s/followers","following_url":"https://api.github.com/users/nim65s/following{/other_user}","gists_url":"https://api.github.com/users/nim65s/gists{/gist_id}","starred_url":"https://api.github.com/users/nim65s/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/nim65s/subscriptions","organizations_url":"https://api.github.com/users/nim65s/orgs","repos_url":"https://api.github.com/users/nim65s/repos","events_url":"https://api.github.com/users/nim65s/events{/privacy}","received_events_url":"https://api.github.com/users/nim65s/received_events","type":"User","site_admin":false},"created":false,"deleted":false,"forced":false,"base_ref":null,"compare":"https://github.com/nim65s/matrix-webhook/compare/ac7d1d964700...4bcdb25c8093","commits":[{"id":"4bcdb25c809391baaabc264d9309059f9f48ead2","tree_id":"e423e7482b0231d04dca2caafcdc48a4b064f17b","distinct":true,"message":"formatters: also get headers","timestamp":"2021-08-27T20:05:08+02:00","url":"https://github.com/nim65s/matrix-webhook/commit/4bcdb25c809391baaabc264d9309059f9f48ead2","author":{"name":"Guilhem Saurel","email":"guilhem.saurel@laas.fr","username":"nim65s"},"committer":{"name":"Guilhem Saurel","email":"guilhem.saurel@laas.fr","username":"nim65s"},"added":[],"removed":[],"modified":["README.md","matrix_webhook/formatters.py","matrix_webhook/handler.py"]}],"head_commit":{"id":"4bcdb25c809391baaabc264d9309059f9f48ead2","tree_id":"e423e7482b0231d04dca2caafcdc48a4b064f17b","distinct":true,"message":"formatters: also get headers","timestamp":"2021-08-27T20:05:08+02:00","url":"https://github.com/nim65s/matrix-webhook/commit/4bcdb25c809391baaabc264d9309059f9f48ead2","author":{"name":"Guilhem Saurel","email":"guilhem.saurel@laas.fr","username":"nim65s"},"committer":{"name":"Guilhem Saurel","email":"guilhem.saurel@laas.fr","username":"nim65s"},"added":[],"removed":[],"modified":["README.md","matrix_webhook/formatters.py","matrix_webhook/handler.py"]}} diff --git a/tests/example_grafana.json b/tests/example_grafana.json new file mode 100644 index 0000000..a768bc0 --- /dev/null +++ b/tests/example_grafana.json @@ -0,0 +1,22 @@ +{ + "dashboardId":1, + "evalMatches":[ + { + "value":1, + "metric":"Count", + "tags":{} + } + ], + "imageUrl":"https://grafana.com/assets/img/blog/mixed_styles.png", + "message":"Notification Message", + "orgId":1, + "panelId":2, + "ruleId":1, + "ruleName":"Panel Title alert", + "ruleUrl":"http://localhost:3000/d/hZ7BuVbWz/test-dashboard?fullscreen\u0026edit\u0026tab=alert\u0026panelId=2\u0026orgId=1", + "state":"alerting", + "tags":{ + "tag name":"tag value" + }, + "title":"[Alerting] Panel Title alert" +} diff --git a/tests/test_github.py b/tests/test_github.py new file mode 100644 index 0000000..30158e5 --- /dev/null +++ b/tests/test_github.py @@ -0,0 +1,71 @@ +"""Test module for grafana formatter.""" + 
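The digest check added to the handler accepts a SHA-256 HMAC hex digest of the raw request body, keyed with the API key, in place of the key itself; that is what GitHub sends in its `X-Hub-Signature-256` header when the webhook secret is set to the API key, and the tests that follow post GitHub-style payloads signed this way. A sender-side sketch of the same computation; the endpoint, key and payload are placeholders, only the digest line mirrors the server check:

```python
"""Minimal sketch of HMAC-signed posting; endpoint, key and payload are placeholders."""
import hmac
import json

import httpx

API_KEY = "secret"  # placeholder, must match the server's API_KEY
payload = json.dumps(
    {
        "pusher": {"name": "someone"},
        "ref": "refs/heads/main",
        "before": "0" * 40,
        "after": "1" * 40,
        "compare": "https://example.org/compare",
        "commits": [],
    }
).encode()

# Same computation as the handler: HMAC(API_KEY, raw_body, "sha256").hexdigest()
digest = hmac.new(API_KEY.encode(), payload, "sha256").hexdigest()

resp = httpx.post(
    "http://localhost:4785/!someroomid:example.org",  # placeholder room
    params={"formatter": "github"},
    content=payload,  # the digest must cover these exact raw bytes
    headers={
        "X-GitHub-Event": "push",
        "X-Hub-Signature-256": f"sha256={digest}",
    },
)
print(resp.json())  # a wrong digest is answered with 401 "Invalid SHA-256 HMAC digest"
```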
+import unittest + +import httpx +import nio + +from .start import BOT_URL, FULL_ID, MATRIX_ID, MATRIX_PW, MATRIX_URL + +SHA256 = "fd7522672889385736be8ffc86d1f8de2e15668864f49af729b5c63e5e0698c4" +EXAMPLE_GITHUB_REQUEST_HEADERS = { + # 'Request URL': 'https://bot.saurel.me/room?formatter=github', + # 'Request method': 'POST', + "Accept": "*/*", + "content-type": "application/json", + "User-Agent": "GitHub-Hookshot/8d33975", + "X-GitHub-Delivery": "636b9b1c-0761-11ec-8a8a-5e435c5ac4f4", + "X-GitHub-Event": "push", + "X-GitHub-Hook-ID": "311845633", + "X-GitHub-Hook-Installation-Target-ID": "171114171", + "X-GitHub-Hook-Installation-Target-Type": "repository", + "X-Hub-Signature": "sha1=ea68fdfcb2f328aaa8f50d176f355e5d4fc95d94", + "X-Hub-Signature-256": f"sha256={SHA256}", +} + + +class GithubFormatterTest(unittest.IsolatedAsyncioTestCase): + """Github formatter test class.""" + + async def test_github_body(self): + """Send a markdown message, and check the result.""" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + with open("tests/example_github_push.json", "rb") as f: + example_github_push = f.read().strip() + + self.assertEqual( + httpx.post( + f"{BOT_URL}/{room.room_id}", + params={ + "formatter": "github", + }, + content=example_github_push, + headers=EXAMPLE_GITHUB_REQUEST_HEADERS, + ).json(), + {"status": 200, "ret": "OK"}, + ) + + sync = await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + before = "ac7d1d9647008145e9d0cf65d24744d0db4862b8" + after = "4bcdb25c809391baaabc264d9309059f9f48ead2" + GH = "https://github.com" + expected = f'

<p><a href="{GH}/nim65s">@nim65s</a> pushed on refs/heads/devel: ' + expected += f'<a href="{GH}/nim65s/matrix-webhook/compare/ac7d1d964700...4bcdb25c8093">{before} → {after}</a>:</p>
\n" + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual( + message.formatted_body, + expected, + ) diff --git a/tests/test_grafana.py b/tests/test_grafana.py index 33c080c..6cd04cf 100644 --- a/tests/test_grafana.py +++ b/tests/test_grafana.py @@ -1,4 +1,8 @@ -"""Test module for grafana formatter.""" +""" +Test module for grafana formatter. + +ref https://grafana.com/docs/grafana/latest/alerting/old-alerting/notifications/#webhook +""" import unittest @@ -7,32 +11,6 @@ import nio from .start import BOT_URL, FULL_ID, KEY, MATRIX_ID, MATRIX_PW, MATRIX_URL -# ref https://grafana.com/docs/grafana/latest/alerting/old-alerting/notifications/#webhook -EXAMPLE_GRAFANA_REQUEST = """ -{ - "dashboardId":1, - "evalMatches":[ - { - "value":1, - "metric":"Count", - "tags":{} - } - ], - "imageUrl":"https://grafana.com/assets/img/blog/mixed_styles.png", - "message":"Notification Message", - "orgId":1, - "panelId":2, - "ruleId":1, - "ruleName":"Panel Title alert", - "ruleUrl":"http://localhost:3000/d/hZ7BuVbWz/test-dashboard?fullscreen\u0026edit\u0026tab=alert\u0026panelId=2\u0026orgId=1", - "state":"alerting", - "tags":{ - "tag name":"tag value" - }, - "title":"[Alerting] Panel Title alert" -} -""" - class GrafanaFormatterTest(unittest.IsolatedAsyncioTestCase): """Grafana formatter test class.""" @@ -45,11 +23,13 @@ class GrafanaFormatterTest(unittest.IsolatedAsyncioTestCase): await client.login(MATRIX_PW) room = await client.room_create() + with open("tests/example_grafana.json") as f: + example_grafana_request = f.read() self.assertEqual( httpx.post( f"{BOT_URL}/{room.room_id}", params={"formatter": "grafana", "key": KEY}, - content=EXAMPLE_GRAFANA_REQUEST, + content=example_grafana_request, ).json(), {"status": 200, "ret": "OK"}, ) diff --git a/tests/tests.py b/tests/tests.py index 01993c3..aa5991c 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -86,7 +86,7 @@ class BotTest(unittest.IsolatedAsyncioTestCase): self.assertEqual(message.formatted_body, "

<h1>Hello</h1>
") async def test_room_id_parameter(self): - """Send a markdown message in a room given as parameter, and check the result.""" + """Send a markdown message in a room given as parameter.""" body = "# Hello" messages = [] client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) From cda88980195fd8b1ad49c5a852d379f00f836e19 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 23:56:56 +0200 Subject: [PATCH 068/184] tests: improve coverage --- tests/test_github.py | 94 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 76 insertions(+), 18 deletions(-) diff --git a/tests/test_github.py b/tests/test_github.py index 30158e5..fed01d7 100644 --- a/tests/test_github.py +++ b/tests/test_github.py @@ -8,27 +8,31 @@ import nio from .start import BOT_URL, FULL_ID, MATRIX_ID, MATRIX_PW, MATRIX_URL SHA256 = "fd7522672889385736be8ffc86d1f8de2e15668864f49af729b5c63e5e0698c4" -EXAMPLE_GITHUB_REQUEST_HEADERS = { - # 'Request URL': 'https://bot.saurel.me/room?formatter=github', - # 'Request method': 'POST', - "Accept": "*/*", - "content-type": "application/json", - "User-Agent": "GitHub-Hookshot/8d33975", - "X-GitHub-Delivery": "636b9b1c-0761-11ec-8a8a-5e435c5ac4f4", - "X-GitHub-Event": "push", - "X-GitHub-Hook-ID": "311845633", - "X-GitHub-Hook-Installation-Target-ID": "171114171", - "X-GitHub-Hook-Installation-Target-Type": "repository", - "X-Hub-Signature": "sha1=ea68fdfcb2f328aaa8f50d176f355e5d4fc95d94", - "X-Hub-Signature-256": f"sha256={SHA256}", -} + + +def headers(sha256=SHA256, event="push"): + """Mock headers from github webhooks.""" + return { + # 'Request URL': 'https://bot.saurel.me/room?formatter=github', + # 'Request method': 'POST', + "Accept": "*/*", + "content-type": "application/json", + "User-Agent": "GitHub-Hookshot/8d33975", + "X-GitHub-Delivery": "636b9b1c-0761-11ec-8a8a-5e435c5ac4f4", + "X-GitHub-Event": event, + "X-GitHub-Hook-ID": "311845633", + "X-GitHub-Hook-Installation-Target-ID": "171114171", + "X-GitHub-Hook-Installation-Target-Type": "repository", + "X-Hub-Signature": "sha1=ea68fdfcb2f328aaa8f50d176f355e5d4fc95d94", + "X-Hub-Signature-256": f"sha256={sha256}", + } class GithubFormatterTest(unittest.IsolatedAsyncioTestCase): """Github formatter test class.""" - async def test_github_body(self): - """Send a markdown message, and check the result.""" + async def test_github_notification(self): + """Send a mock github webhook, and check the result.""" messages = [] client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) @@ -37,7 +41,6 @@ class GithubFormatterTest(unittest.IsolatedAsyncioTestCase): with open("tests/example_github_push.json", "rb") as f: example_github_push = f.read().strip() - self.assertEqual( httpx.post( f"{BOT_URL}/{room.room_id}", @@ -45,7 +48,40 @@ class GithubFormatterTest(unittest.IsolatedAsyncioTestCase): "formatter": "github", }, content=example_github_push, - headers=EXAMPLE_GITHUB_REQUEST_HEADERS, + headers=headers(event="something else"), + ).json(), + {"status": 200, "ret": "OK"}, + ) + + sync = await client.sync() + messages = await client.room_messages(room.room_id, sync.next_batch) + await client.close() + + message = messages.chunk[0] + self.assertEqual(message.sender, FULL_ID) + self.assertEqual( + message.formatted_body, + "

<p>notification from github</p>
", + ) + + async def test_github_push(self): + """Send a mock github push webhook, and check the result.""" + messages = [] + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + with open("tests/example_github_push.json", "rb") as f: + example_github_push = f.read().strip() + self.assertEqual( + httpx.post( + f"{BOT_URL}/{room.room_id}", + params={ + "formatter": "github", + }, + content=example_github_push, + headers=headers(), ).json(), {"status": 200, "ret": "OK"}, ) @@ -69,3 +105,25 @@ class GithubFormatterTest(unittest.IsolatedAsyncioTestCase): message.formatted_body, expected, ) + + async def test_github_wrong_digest(self): + """Send a mock github push webhook with a wrong digest.""" + client = nio.AsyncClient(MATRIX_URL, MATRIX_ID) + + await client.login(MATRIX_PW) + room = await client.room_create() + + with open("tests/example_github_push.json", "rb") as f: + example_github_push = f.read().strip() + + self.assertEqual( + httpx.post( + f"{BOT_URL}/{room.room_id}", + params={ + "formatter": "github", + }, + content=example_github_push, + headers=headers("wrong digest"), + ).json(), + {"status": 401, "ret": "Invalid SHA-256 HMAC digest"}, + ) From 4928ceb91a8b6d0a70338a6fa583a8fbdb173c31 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 23:59:49 +0200 Subject: [PATCH 069/184] release: details --- docs/release.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/release.sh b/docs/release.sh index c1cb632..40f05e2 100755 --- a/docs/release.sh +++ b/docs/release.sh @@ -1,12 +1,12 @@ #!/bin/bash -eux # ./docs/release.sh [patch|minor|major|x.y.z] -[[ $(basename $PWD) == docs ]] && cd .. +[[ $(basename "$PWD") == docs ]] && cd .. 
OLD=$(poetry version -s) -poetry version $1 +poetry version "$1" NEW=$(poetry version -s) DATE=$(date +%Y-%m-%d) @@ -15,6 +15,6 @@ sed -i "/^## \[Unreleased\]/a \\\n## [$NEW] - $DATE" CHANGELOG.md sed -i "/^\[Unreleased\]/s/$OLD/$NEW/" CHANGELOG.md sed -i "/^\[Unreleased\]/a [$NEW] https://github.com/nim65s/matrix-webhook/compare/v$OLD...v$NEW" CHANGELOG.md -echo git add pyproject.toml CHANGELOG.md -echo git commit -m "Release v$NEW" -echo git tag -s "v$NEW" -m "Release v$NEW" +git add pyproject.toml CHANGELOG.md +git commit -m "Release v$NEW" +git tag -s "v$NEW" -m "Release v$NEW" From 7f5c8583a110d55cb0e69f2cbebc023dba9fb21c Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Fri, 27 Aug 2021 23:59:56 +0200 Subject: [PATCH 070/184] Release v3.2.0 --- CHANGELOG.md | 5 ++++- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ede170d..64dc15f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [3.2.0] - 2021-08-27 + - add github & grafana formatters - add formatted_body to bypass markdown with direct [matrix-custom-HTML](https://matrix.org/docs/spec/client_server/r0.6.1#m-room-message-msgtypes) @@ -48,7 +50,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [1.0.0] - 2020-02-14 - First release with matrix-client & http.server -[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.1.1...master +[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.2.0...master +[3.2.0] https://github.com/nim65s/matrix-webhook/compare/v3.1.1...v3.2.0 [3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 [3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 [3.0.0]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...v3.0.0 diff --git a/pyproject.toml b/pyproject.toml index 01252ba..4d815bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "matrix-webhook" -version = "3.1.1" +version = "3.2.0" description = "Post a message to a matrix room with a simple HTTP POST" authors = ["Guilhem Saurel "] license = "BSD-2-Clause" From 7e1be831df74a85d60f271ae8697237cc8f49759 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:05:20 +0200 Subject: [PATCH 071/184] ci: detail --- .github/workflows/docker-hub.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-hub.yml b/.github/workflows/docker-hub.yml index 8fa86ac..9be581b 100644 --- a/.github/workflows/docker-hub.yml +++ b/.github/workflows/docker-hub.yml @@ -1,4 +1,4 @@ -name: Publish +name: Publish on Docker Hub on: push: From 1c00ff22f3276024c58bb42ed3cbacb32116c8b8 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:06:38 +0200 Subject: [PATCH 072/184] fix changelog --- CHANGELOG.md | 26 ++++++++++++++------------ docs/release.sh | 4 ++-- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64dc15f..3b4554b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -## [3.2.0] - 2021-08-27 +- fix changelog + +## [v3.2.0] - 2021-08-27 - add github & grafana formatters - add formatted_body to bypass markdown with direct @@ -18,15 +20,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Publish 
releases also on github from github actions - fix tests for recent synapse docker image -## [3.1.1] - 2021-07-18 +## [v3.1.1] - 2021-07-18 -## [3.1.0] - 2021-07-18 +## [v3.1.0] - 2021-07-18 - Publish on PyPI & Docker Hub with Github Actions in [#10](https://github.com/nim65s/matrix-webhook/pull/10) by [@nim65s](https://github.com/nim65s) -## [3.0.0] - 2021-07-18 +## [v3.0.0] - 2021-07-18 - Simplify code in [#1](https://github.com/nim65s/matrix-webhook/pull/1) @@ -44,16 +46,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 in [#9](https://github.com/nim65s/matrix-webhook/pull/9) by [@nim65s](https://github.com/nim65s) -## [2.0.0] - 2020-03-14 +## [v2.0.0] - 2020-03-14 - Update to matrix-nio & aiohttp & markdown -## [1.0.0] - 2020-02-14 +## [v1.0.0] - 2020-02-14 - First release with matrix-client & http.server [Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.2.0...master -[3.2.0] https://github.com/nim65s/matrix-webhook/compare/v3.1.1...v3.2.0 -[3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 -[3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 -[3.0.0]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...v3.0.0 -[2.0.0]: https://github.com/nim65s/matrix-webhook/compare/v1.0.0...v2.0.0 -[1.0.0]: https://github.com/nim65s/matrix-webhook/releases/tag/v1.0.0 +[v3.2.0] https://github.com/nim65s/matrix-webhook/compare/v3.1.1...v3.2.0 +[v3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 +[v3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 +[v3.0.0]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...v3.0.0 +[v2.0.0]: https://github.com/nim65s/matrix-webhook/compare/v1.0.0...v2.0.0 +[v1.0.0]: https://github.com/nim65s/matrix-webhook/releases/tag/v1.0.0 diff --git a/docs/release.sh b/docs/release.sh index 40f05e2..26b7b83 100755 --- a/docs/release.sh +++ b/docs/release.sh @@ -11,9 +11,9 @@ poetry version "$1" NEW=$(poetry version -s) DATE=$(date +%Y-%m-%d) -sed -i "/^## \[Unreleased\]/a \\\n## [$NEW] - $DATE" CHANGELOG.md +sed -i "/^## \[Unreleased\]/a \\\n## [v$NEW] - $DATE" CHANGELOG.md sed -i "/^\[Unreleased\]/s/$OLD/$NEW/" CHANGELOG.md -sed -i "/^\[Unreleased\]/a [$NEW] https://github.com/nim65s/matrix-webhook/compare/v$OLD...v$NEW" CHANGELOG.md +sed -i "/^\[Unreleased\]/a [v$NEW] https://github.com/nim65s/matrix-webhook/compare/v$OLD...v$NEW" CHANGELOG.md git add pyproject.toml CHANGELOG.md git commit -m "Release v$NEW" From d726db6ed222c0ba543ecd6fe5fef7587c8bcab4 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:07:30 +0200 Subject: [PATCH 073/184] release: push --- docs/release.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/release.sh b/docs/release.sh index 26b7b83..933e083 100755 --- a/docs/release.sh +++ b/docs/release.sh @@ -18,3 +18,5 @@ sed -i "/^\[Unreleased\]/a [v$NEW] https://github.com/nim65s/matrix-webhook/comp git add pyproject.toml CHANGELOG.md git commit -m "Release v$NEW" git tag -s "v$NEW" -m "Release v$NEW" +git push +git push --tags From 58d0e83f8fe0208fdcd202ddcef35548f2f51af2 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:07:36 +0200 Subject: [PATCH 074/184] Release v3.2.1 --- CHANGELOG.md | 5 ++++- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3b4554b..d55ec76 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ and this project adheres to [Semantic 
Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [v3.2.1] - 2021-08-28 + - fix changelog ## [v3.2.0] - 2021-08-27 @@ -52,7 +54,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [v1.0.0] - 2020-02-14 - First release with matrix-client & http.server -[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.2.0...master +[Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.2.1...master +[v3.2.1] https://github.com/nim65s/matrix-webhook/compare/v3.2.0...v3.2.1 [v3.2.0] https://github.com/nim65s/matrix-webhook/compare/v3.1.1...v3.2.0 [v3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 [v3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 diff --git a/pyproject.toml b/pyproject.toml index 4d815bb..2994446 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "matrix-webhook" -version = "3.2.0" +version = "3.2.1" description = "Post a message to a matrix room with a simple HTTP POST" authors = ["Guilhem Saurel "] license = "BSD-2-Clause" From df8ce523cb9ad6d9cfbda213744c3da9f444438b Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:08:56 +0200 Subject: [PATCH 075/184] fix changelog --- CHANGELOG.md | 4 ++-- docs/release.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d55ec76..646d4fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -55,8 +55,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - First release with matrix-client & http.server [Unreleased]: https://github.com/nim65s/matrix-webhook/compare/v3.2.1...master -[v3.2.1] https://github.com/nim65s/matrix-webhook/compare/v3.2.0...v3.2.1 -[v3.2.0] https://github.com/nim65s/matrix-webhook/compare/v3.1.1...v3.2.0 +[v3.2.1]: https://github.com/nim65s/matrix-webhook/compare/v3.2.0...v3.2.1 +[v3.2.0]: https://github.com/nim65s/matrix-webhook/compare/v3.1.1...v3.2.0 [v3.1.1]: https://github.com/nim65s/matrix-webhook/compare/v3.1.0...v3.1.1 [v3.1.0]: https://github.com/nim65s/matrix-webhook/compare/v3.0.0...v3.1.0 [v3.0.0]: https://github.com/nim65s/matrix-webhook/compare/v2.0.0...v3.0.0 diff --git a/docs/release.sh b/docs/release.sh index 933e083..c2ef8e3 100755 --- a/docs/release.sh +++ b/docs/release.sh @@ -13,7 +13,7 @@ DATE=$(date +%Y-%m-%d) sed -i "/^## \[Unreleased\]/a \\\n## [v$NEW] - $DATE" CHANGELOG.md sed -i "/^\[Unreleased\]/s/$OLD/$NEW/" CHANGELOG.md -sed -i "/^\[Unreleased\]/a [v$NEW] https://github.com/nim65s/matrix-webhook/compare/v$OLD...v$NEW" CHANGELOG.md +sed -i "/^\[Unreleased\]/a [v$NEW]: https://github.com/nim65s/matrix-webhook/compare/v$OLD...v$NEW" CHANGELOG.md git add pyproject.toml CHANGELOG.md git commit -m "Release v$NEW" From 0aea63903dc31fedb3f53200d3af4c7e8cd01f59 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:11:30 +0200 Subject: [PATCH 076/184] badges: add PyPI --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 62f7dbd..12fc780 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ [![Release](https://github.com/nim65s/matrix-webhook/actions/workflows/release.yml/badge.svg)](https://pypi.org/project/matrix-webhook/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) +[![PyPI 
version](https://badge.fury.io/py/matrix-webhook.svg)](https://badge.fury.io/py/matrix-webhook) Post a message to a matrix room with a simple HTTP POST From 8592be257fdd5e6d0af26e3ac3a1b5d94dcc219f Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:15:41 +0200 Subject: [PATCH 077/184] badges: new line --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 12fc780..1971405 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,7 @@ [![Lints](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml/badge.svg)](https://github.com/nim65s/matrix-webhook/actions/workflows/lint.yml) [![Docker-Hub](https://github.com/nim65s/matrix-webhook/actions/workflows/docker-hub.yml/badge.svg)](https://hub.docker.com/r/nim65s/matrix-webhook) [![Release](https://github.com/nim65s/matrix-webhook/actions/workflows/release.yml/badge.svg)](https://pypi.org/project/matrix-webhook/) + [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![codecov](https://codecov.io/gh/nim65s/matrix-webhook/branch/master/graph/badge.svg?token=BLGISGCYKG)](https://codecov.io/gh/nim65s/matrix-webhook) [![PyPI version](https://badge.fury.io/py/matrix-webhook.svg)](https://badge.fury.io/py/matrix-webhook) From 179ef11ae70b6f7fcfccff641769497a16f292a8 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:18:42 +0200 Subject: [PATCH 078/184] readme: typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1971405..6c66d3b 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ Add a JSON webhook with `?formatter=github`, and put the `API_KEY` as secret ### For Grafana -Add a webhook with an URL ending with `?formatter=grafana&key=API_KEY' +Add a webhook with an URL ending with `?formatter=grafana&key=API_KEY` ## Test room From ef79d39a9f6cecaaf54c70f1dca5ed46896ad499 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 30 Aug 2021 17:11:56 +0000 Subject: [PATCH 079/184] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 21.7b0 → 21.8b0](https://github.com/psf/black/compare/21.7b0...21.8b0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e09667c..7340a47 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 21.7b0 + rev: 21.8b0 hooks: - id: black language_version: python3 From 7aa5df3871da80b3630049c3fc6685ea9485c7f2 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 00:38:39 +0200 Subject: [PATCH 080/184] readme: detail --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6c66d3b..6e3db88 100644 --- a/README.md +++ b/README.md @@ -91,7 +91,7 @@ Add a webhook with an URL ending with `?formatter=grafana&key=API_KEY` ## Test room -[#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org?via=laas.fr&via=tetaneutral.net&via=aen.im) +[#matrix-webhook:tetaneutral.net](https://matrix.to/#/!DPrUlnwOhBEfYwsDLh:matrix.org) ## Unit tests From d0c4cd42274cc2261aab755190e92c3daca08b08 Mon Sep 17 00:00:00 2001 From: Guilhem Saurel Date: Sat, 28 Aug 2021 
00:40:56 +0200 Subject: [PATCH 081/184] formatters: fix github format --- matrix_webhook/formatters.py | 4 ++-- tests/test_github.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/matrix_webhook/formatters.py b/matrix_webhook/formatters.py index 55ba43d..2d52788 100644 --- a/matrix_webhook/formatters.py +++ b/matrix_webhook/formatters.py @@ -22,8 +22,8 @@ def github(data, headers): pusher, ref, a, b, c = [ data[k] for k in ["pusher", "ref", "after", "before", "compare"] ] - pusher = f"[{pusher['name']}](https://github.com/{pusher['name']})" - data["body"] = f"@{pusher} pushed on {ref}: [{b} → {a}]({c}):\n\n" + pusher = f"[@{pusher['name']}](https://github.com/{pusher['name']})" + data["body"] = f"{pusher} pushed on {ref}: [{b} → {a}]({c}):\n\n" for commit in data["commits"]: data["body"] += f"- [{commit['message']}]({commit['url']})\n" else: diff --git a/tests/test_github.py b/tests/test_github.py index fed01d7..3943202 100644 --- a/tests/test_github.py +++ b/tests/test_github.py @@ -93,7 +93,7 @@ class GithubFormatterTest(unittest.IsolatedAsyncioTestCase): before = "ac7d1d9647008145e9d0cf65d24744d0db4862b8" after = "4bcdb25c809391baaabc264d9309059f9f48ead2" GH = "https://github.com" - expected = f'
<p>@<a href="{GH}/nim65s">nim65s</a> pushed on refs/heads/devel: '
+ expected = f'<p><a href="{GH}/nim65s">@nim65s</a> pushed on refs/heads/devel: '
expected += f'{before} → {after}:\n
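For context on [PATCH 081/184] above, here is a minimal sketch (not part of the patch series) of why moving the `@` inside the markdown link changes the expected HTML in `tests/test_github.py`. It assumes only the `markdown` package with the `extra` extension, as already used by `matrix_webhook.py`; the `name` and `ref` values are illustrative.

```
from markdown import markdown

name = "nim65s"           # illustrative pusher name
ref = "refs/heads/devel"  # illustrative ref

# Body built by the old formatter: the "@" sits outside the markdown link.
old_body = f"@[{name}](https://github.com/{name}) pushed on {ref}:"
print(markdown(old_body, extensions=["extra"]))
# <p>@<a href="https://github.com/nim65s">nim65s</a> pushed on refs/heads/devel:</p>

# Body built by the fixed formatter: the "@" is part of the link text,
# so it ends up inside the <a> element.
new_body = f"[@{name}](https://github.com/{name}) pushed on {ref}:"
print(markdown(new_body, extensions=["extra"]))
# <p><a href="https://github.com/nim65s">@nim65s</a> pushed on refs/heads/devel:</p>
```

Keeping the `@` inside the link text makes the whole `@nim65s` mention clickable, matching how GitHub renders user mentions, which is why the test's expected HTML moves the `@` inside the anchor.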