Merge remote-tracking branch 'origin/release-v0.12.0'
commit f35f8d06ea
86 changed files with 2612 additions and 883 deletions
@@ -48,3 +48,6 @@ Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
 
 Steven Hammerton <steven.hammerton at openmarket.com>
  * Add CAS support for registration and login.
+
+Mads Robin Christensen <mads at v42 dot dk>
+ * CentOS 7 installation instructions.
CHANGES.rst (74 changed lines)
@@ -1,3 +1,77 @@
+Changes in synapse v0.12.0 (2016-01-04)
+=======================================
+
+* Expose ``/login`` under ``r0`` (PR #459)
+
+Changes in synapse v0.12.0-rc3 (2015-12-23)
+===========================================
+
+* Allow guest accounts access to ``/sync`` (PR #455)
+* Allow filters to include/exclude rooms at the room level
+  rather than just from the components of the sync for each
+  room. (PR #454)
+* Include urls for room avatars in the response to ``/publicRooms`` (PR #453)
+* Don't set an identicon as the avatar for a user when they register (PR #450)
+* Add a ``display_name`` to third-party invites (PR #449)
+* Send more information to the identity server for third-party invites so that
+  it can send richer messages to the invitee (PR #446)
+* Cache the responses to ``/initialSync`` for 5 minutes. If a client
+  retries a request to ``/initialSync`` before a response was computed
+  for the first request, the same response is used for both requests
+  (PR #457)
+* Fix a bug where synapse would always request the signing keys of
+  remote servers even when the key was cached locally (PR #452)
+* Fix 500 when paginating search results (PR #447)
+* Fix a bug where synapse was leaking raw email addresses in third-party
+  invites (PR #448)
+
+Changes in synapse v0.12.0-rc2 (2015-12-14)
+===========================================
+
+* Add caches for whether rooms have been forgotten by a user (PR #434)
+* Remove instructions to use ``--process-dependency-link`` since all of the
+  dependencies of synapse are on PyPI (PR #436)
+* Parallelise the processing of ``/sync`` requests (PR #437)
+* Fix race updating presence in ``/events`` (PR #444)
+* Fix bug back-populating search results (PR #441)
+* Fix bug calculating state in ``/sync`` requests (PR #442)
+
+Changes in synapse v0.12.0-rc1 (2015-12-10)
+===========================================
+
+* Host the client APIs released as r0 by
+  https://matrix.org/docs/spec/r0.0.0/client_server.html
+  on paths prefixed by ``/_matrix/client/r0``. (PR #430, PR #415, PR #400)
+* Update the client APIs to match r0 of the matrix specification.
+
+  * All APIs return events in the new event format; old APIs also include
+    the fields needed to parse the event using the old format, for
+    compatibility. (PR #402)
+  * Search results are now given as a JSON array rather than
+    a JSON object (PR #405)
+  * Miscellaneous changes to search (PR #403, PR #406, PR #412)
+  * Filter JSON objects may now be passed as query parameters to ``/sync``
+    (PR #431)
+  * Fix implementation of ``/admin/whois`` (PR #418)
+  * Only include the rooms that the user has left in ``/sync`` if the client
+    requests them in the filter (PR #423)
+  * Don't push for ``m.room.message`` by default (PR #411)
+  * Add API for setting per-account user data (PR #392)
+  * Allow users to forget rooms (PR #385)
+
+* Performance improvements and monitoring:
+
+  * Add per-request counters for CPU time spent on the main python thread.
+    (PR #421, PR #420)
+  * Add per-request counters for time spent in the database (PR #429)
+
+* Make state updates in the C+S API idempotent (PR #416)
+* Only fire ``user_joined_room`` if the user has actually joined. (PR #410)
+* Reuse a single http client, rather than creating new ones (PR #413)
+* Fixed a bug upgrading from older versions of synapse on postgresql (PR #417)
+
 Changes in synapse v0.11.1 (2015-11-20)
 =======================================
 
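A minimal sketch (not part of this commit; host, access token and room id are
placeholders) of two of the filter changes noted above: a filter JSON object
passed directly as the ``filter`` query parameter of ``/sync`` (PR #431),
using a room-level ``rooms`` include list (PR #454)::

    import json
    import urllib

    filter_json = {
        "room": {
            "rooms": ["!demo:localhost"],   # room-level include list (PR #454)
            "timeline": {"limit": 10},
        }
    }
    query = urllib.urlencode({
        "access_token": "<token>",          # placeholder, not a real token
        "filter": json.dumps(filter_json),  # filter passed inline (PR #431)
    })
    print "http://localhost:8008/_matrix/client/r0/sync?" + query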
README.rst (48 changed lines)
@@ -111,6 +111,14 @@ Installing prerequisites on ArchLinux::
     sudo pacman -S base-devel python2 python-pip \
                    python-setuptools python-virtualenv sqlite3
 
+Installing prerequisites on CentOS 7::
+
+    sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
+                     lcms2-devel libwebp-devel tcl-devel tk-devel \
+                     python-virtualenv libffi-devel openssl-devel
+    sudo yum groupinstall "Development Tools"
+
+
 Installing prerequisites on Mac OS X::
 
     xcode-select --install
@@ -122,7 +130,7 @@ To install the synapse homeserver run::
     virtualenv -p python2.7 ~/.synapse
     source ~/.synapse/bin/activate
     pip install --upgrade setuptools
-    pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+    pip install https://github.com/matrix-org/synapse/tarball/master
 
 This installs synapse, along with the libraries it uses, into a virtual
 environment under ``~/.synapse``. Feel free to pick a different directory
@@ -148,9 +156,10 @@ To set up your homeserver, run (in your virtualenv, as before)::
     python -m synapse.app.homeserver \
         --server-name machine.my.domain.name \
         --config-path homeserver.yaml \
-        --generate-config
+        --generate-config \
+        --report-stats=[yes|no]
 
-Substituting your host and domain name as appropriate.
+...substituting your host and domain name as appropriate.
 
 This will generate you a config file that you can then customise, but it will
 also generate a set of keys for you. These keys will allow your Home Server to
@@ -163,10 +172,11 @@ key in the <server name>.signing.key file (the second word, which by default is
 
 By default, registration of new users is disabled. You can either enable
 registration in the config by specifying ``enable_registration: true``
-(it is then recommended to also set up CAPTCHA), or
+(it is then recommended to also set up CAPTCHA - see docs/CAPTCHA_SETUP), or
 you can use the command line to register new users::
 
     $ source ~/.synapse/bin/activate
+    $ synctl start # if not already running
    $ register_new_matrix_user -c homeserver.yaml https://localhost:8448
     New user localpart: erikj
     Password:
@@ -176,6 +186,16 @@ you can use the command line to register new users::
 For reliable VoIP calls to be routed via this homeserver, you MUST configure
 a TURN server. See docs/turn-howto.rst for details.
 
+Running Synapse
+===============
+
+To actually run your new homeserver, pick a working directory for Synapse to
+run (e.g. ``~/.synapse``), and::
+
+    cd ~/.synapse
+    source ./bin/activate
+    synctl start
+
 Using PostgreSQL
 ================
 
@@ -198,16 +218,6 @@ may have a few regressions relative to SQLite.
 For information on how to install and use PostgreSQL, please see
 `docs/postgres.rst <docs/postgres.rst>`_.
 
-Running Synapse
-===============
-
-To actually run your new homeserver, pick a working directory for Synapse to
-run (e.g. ``~/.synapse``), and::
-
-    cd ~/.synapse
-    source ./bin/activate
-    synctl start
-
 Platform Specific Instructions
 ==============================
 
@@ -229,8 +239,7 @@ pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
 You also may need to explicitly specify python 2.7 again during the install
 request::
 
-    pip2.7 install --process-dependency-links \
-        https://github.com/matrix-org/synapse/tarball/master
+    pip2.7 install https://github.com/matrix-org/synapse/tarball/master
 
 If you encounter an error with lib bcrypt causing a Wrong ELF Class:
 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
@@ -289,8 +298,7 @@ Troubleshooting
 Troubleshooting Installation
 ----------------------------
 
-Synapse requires pip 1.7 or later, so if your OS provides too old a version and
-you get errors about ``error: no such option: --process-dependency-links`` you
+Synapse requires pip 1.7 or later, so if your OS provides too old a version you
 may need to manually upgrade it::
 
     sudo pip install --upgrade pip
@@ -434,6 +442,10 @@ SRV record, as that is the name other machines will expect it to have::
     python -m synapse.app.homeserver --config-path homeserver.yaml
 
 
+If you've already generated the config file, you need to edit the "server_name"
+in your ``homeserver.yaml`` file. If you've already started Synapse and a
+database has been created, you will have to recreate the database.
+
 You may additionally want to pass one or more "-v" options, in order to
 increase the verbosity of logging output; at least for initial testing.
@@ -18,8 +18,8 @@ encoding use, e.g.::
 This would create an appropriate database named ``synapse`` owned by the
 ``synapse_user`` user (which must already exist).
 
-Set up client
-=============
+Set up client in Debian/Ubuntu
+==============================
 
 Postgres support depends on the postgres python connector ``psycopg2``. In the
 virtual env::
@@ -27,6 +27,19 @@ virtual env::
     sudo apt-get install libpq-dev
     pip install psycopg2
 
+Set up client in RHEL/CentOS 7
+==============================
+
+Make sure you have the appropriate version of postgres-devel installed. For a
+postgres 9.4, use the postgres 9.4 packages from
+https://wiki.postgresql.org/wiki/YUM_Installation.
+
+As with Debian/Ubuntu, postgres support depends on the postgres python connector
+``psycopg2``. In the virtual env::
+
+    sudo yum install postgresql-devel libpqxx-devel.x86_64
+    export PATH=/usr/pgsql-9.4/bin/:$PATH
+    pip install psycopg2
+
 Synapse config
 ==============
 
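The ``Synapse config`` heading above ends this excerpt; that section of the
postgres setup document describes pointing ``homeserver.yaml`` at the new
database. A minimal sketch of such a ``database`` block, reusing the
``synapse_user``/``synapse`` names from earlier (the password is a
placeholder, and this block is not part of this commit)::

    database:
        name: psycopg2
        args:
            user: synapse_user
            password: secretpassword
            database: synapse
            host: localhost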
jenkins.sh (50 changed lines)
@@ -5,9 +5,9 @@ export PYTHONDONTWRITEBYTECODE=yep
 # Output test results as junit xml
 export TRIAL_FLAGS="--reporter=subunit"
 export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
-# Output coverage to coverage.xml
-export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
+# Write coverage reports to a separate file for each process
+export COVERAGE_OPTS="-p"
+export DUMP_COVERAGE_COMMAND="coverage help"
 
 # Output flake8 violations to violations.flake8.log
 # Don't exit with non-0 status code on Jenkins,
@@ -15,13 +15,13 @@ export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
 # UNSTABLE or FAILURE this build.
 export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
 
+rm .coverage* || echo "No coverage files to remove"
+
 tox
 
 : ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
 
-set +u
-. .tox/py27/bin/activate
-set -u
+TOX_BIN=$WORKSPACE/.tox/py27/bin
 
 if [[ ! -e .sytest-base ]]; then
   git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
@@ -42,4 +42,40 @@ export PERL5LIB PERL_MB_OPT PERL_MM_OPT
 
 ./install-deps.pl
 
-./run-tests.pl -O tap --synapse-directory .. --all > results.tap
+: ${PORT_BASE:=8000}
+
+echo >&2 "Running sytest with SQLite3";
+./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
+    --python $TOX_BIN/python --all --port-base $PORT_BASE > results-sqlite3.tap
+
+RUN_POSTGRES=""
+
+for port in $(($PORT_BASE + 1)) $(($PORT_BASE + 2)); do
+    if psql synapse_jenkins_$port <<< ""; then
+        RUN_POSTGRES=$RUN_POSTGRES:$port
+        cat > localhost-$port/database.yaml << EOF
+name: psycopg2
+args:
+    database: synapse_jenkins_$port
+EOF
+    fi
+done
+
+# Run if both postgresql databases exist
+if test $RUN_POSTGRES = ":$(($PORT_BASE + 1)):$(($PORT_BASE + 2))"; then
+    echo >&2 "Running sytest with PostgreSQL";
+    $TOX_BIN/pip install psycopg2
+    ./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
+        --python $TOX_BIN/python --all --port-base $PORT_BASE > results-postgresql.tap
+else
+    echo >&2 "Skipping running sytest with PostgreSQL, $RUN_POSTGRES"
+fi
+
+cd ..
+cp sytest/.coverage.* .
+
+# Combine the coverage reports
+echo "Combining:" .coverage.*
+$TOX_BIN/python -m coverage combine
+# Output coverage to coverage.xml
+$TOX_BIN/coverage xml -o coverage.xml
@@ -79,16 +79,16 @@ def defined_names(prefix, defs, names):
         defined_names(prefix + name + ".", funcs, names)
 
 
-def used_names(prefix, defs, names):
+def used_names(prefix, item, defs, names):
     for name, funcs in defs.get('def', {}).items():
-        used_names(prefix + name + ".", funcs, names)
+        used_names(prefix + name + ".", name, funcs, names)
 
     for name, funcs in defs.get('class', {}).items():
-        used_names(prefix + name + ".", funcs, names)
+        used_names(prefix + name + ".", name, funcs, names)
 
     for used in defs.get('uses', ()):
         if used in names:
-            names[used].setdefault('used', []).append(prefix.rstrip('.'))
+            names[used].setdefault('used', {}).setdefault(item, []).append(prefix.rstrip('.'))
 
 
 if __name__ == '__main__':
@@ -109,6 +109,14 @@ if __name__ == '__main__':
         "directories", nargs='+', metavar="DIR",
         help="Directories to search for definitions"
     )
+    parser.add_argument(
+        "--referrers", default=0, type=int,
+        help="Include referrers up to the given depth"
+    )
+    parser.add_argument(
+        "--format", default="yaml",
+        help="Output format, one of 'yaml' or 'dot'"
+    )
     args = parser.parse_args()
 
     definitions = {}
@@ -124,7 +132,7 @@ if __name__ == '__main__':
         defined_names(filepath + ":", defs, names)
 
     for filepath, defs in definitions.items():
-        used_names(filepath + ":", defs, names)
+        used_names(filepath + ":", None, defs, names)
 
     patterns = [re.compile(pattern) for pattern in args.pattern or ()]
     ignore = [re.compile(pattern) for pattern in args.ignore or ()]
@@ -139,4 +147,29 @@ if __name__ == '__main__':
             continue
         result[name] = definition
 
-    yaml.dump(result, sys.stdout, default_flow_style=False)
+    referrer_depth = args.referrers
+    referrers = set()
+    while referrer_depth:
+        referrer_depth -= 1
+        for entry in result.values():
+            for used_by in entry.get("used", ()):
+                referrers.add(used_by)
+        for name, definition in names.items():
+            if not name in referrers:
+                continue
+            if ignore and any(pattern.match(name) for pattern in ignore):
+                continue
+            result[name] = definition
+
+    if args.format == 'yaml':
+        yaml.dump(result, sys.stdout, default_flow_style=False)
+    elif args.format == 'dot':
+        print "digraph {"
+        for name, entry in result.items():
+            print name
+            for used_by in entry.get("used", ()):
+                if used_by in result:
+                    print used_by, "->", name
+        print "}"
+    else:
+        raise ValueError("Unknown format %r" % (args.format))
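A minimal sketch (not from the commit; names are illustrative) of the
data-structure change in ``used_names`` above: the ``used`` entry goes from a
flat list of referrer prefixes to a dict keyed by the name of the enclosing
definition, which is what lets the new ``--referrers`` pass walk callers::

    # old shape: names["foo"]["used"] == ["mod.py:caller"]
    # new shape: names["foo"]["used"] == {"caller": ["mod.py:caller"]}
    names = {"foo": {}}
    names["foo"].setdefault("used", {}).setdefault("caller", []).append(
        "mod.py:caller"
    )
    print names  # {'foo': {'used': {'caller': ['mod.py:caller']}}}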
scripts/gen_password (new file, 1 line)
@@ -0,0 +1 @@
+perl -MCrypt::Random -MCrypt::Eksblowfish::Bcrypt -e 'print Crypt::Eksblowfish::Bcrypt::bcrypt("secret", "\$2\$12\$" . Crypt::Eksblowfish::Bcrypt::en_base64(Crypt::Random::makerandom_octet(Length=>16)))."\n"'
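A rough Python equivalent of the perl one-liner above, as a sketch only: it
assumes the py-bcrypt package that synapse already uses for password hashing,
and mirrors the script's hard-coded "secret" password and work factor of 12::

    import bcrypt  # py-bcrypt, assumed available

    # hash the literal password "secret" with a random 12-round salt
    print bcrypt.hashpw("secret", bcrypt.gensalt(12))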
synapse/__init__.py
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.11.1"
+__version__ = "0.12.0"
@@ -207,6 +207,13 @@ class Auth(object):
                 user_id, room_id
             ))
 
+        if membership == Membership.LEAVE:
+            forgot = yield self.store.did_forget(user_id, room_id)
+            if forgot:
+                raise AuthError(403, "User %s not in room %s" % (
+                    user_id, room_id
+                ))
+
         defer.returnValue(member)
 
     @defer.inlineCallbacks
@@ -771,7 +778,7 @@ class Auth(object):
         if "third_party_invite" in event.content:
             key = (
                 EventTypes.ThirdPartyInvite,
-                event.content["third_party_invite"]["token"]
+                event.content["third_party_invite"]["signed"]["token"]
             )
             third_party_invite = current_state.get(key)
             if third_party_invite:
@@ -853,7 +860,7 @@ class Auth(object):
 
         redact_level = self._get_named_level(auth_events, "redact", 50)
 
-        if user_level > redact_level:
+        if user_level >= redact_level:
             return False
 
         redacter_domain = EventID.from_string(event.event_id).domain
@@ -120,6 +120,22 @@ class AuthError(SynapseError):
         super(AuthError, self).__init__(*args, **kwargs)
 
 
+class GuestAccessError(AuthError):
+    """An error raised when there is a problem with a guest user accessing
+    a room"""
+
+    def __init__(self, rooms, *args, **kwargs):
+        self.rooms = rooms
+        super(GuestAccessError, self).__init__(*args, **kwargs)
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            rooms=self.rooms,
+        )
+
+
 class EventSizeError(SynapseError):
     """An error raised when an event is too big."""
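A hedged sketch of the error body ``GuestAccessError.error_dict()`` above
would produce, assuming synapse's ``cs_error`` builds a dict of the form
``{"error": msg, "errcode": code}`` plus keyword arguments; the values here
are illustrative, not from the commit::

    def cs_error(msg, code, **kwargs):
        # assumed shape of synapse's cs_error helper
        err = {"error": msg, "errcode": code}
        err.update(kwargs)
        return err

    print cs_error("Guest access forbidden", "M_FORBIDDEN",
                   rooms=["!abc:example.org"])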
@@ -50,7 +50,7 @@ class Filtering(object):
         # many definitions.
 
         top_level_definitions = [
-            "presence"
+            "presence", "account_data"
         ]
 
         room_level_definitions = [
|
||||||
self._check_definition(user_filter_json[key])
|
self._check_definition(user_filter_json[key])
|
||||||
|
|
||||||
if "room" in user_filter_json:
|
if "room" in user_filter_json:
|
||||||
|
self._check_definition_room_lists(user_filter_json["room"])
|
||||||
for key in room_level_definitions:
|
for key in room_level_definitions:
|
||||||
if key in user_filter_json["room"]:
|
if key in user_filter_json["room"]:
|
||||||
self._check_definition(user_filter_json["room"][key])
|
self._check_definition(user_filter_json["room"][key])
|
||||||
|
|
||||||
|
def _check_definition_room_lists(self, definition):
|
||||||
|
"""Check that "rooms" and "not_rooms" are lists of room ids if they
|
||||||
|
are present
|
||||||
|
|
||||||
|
Args:
|
||||||
|
definition(dict): The filter definition
|
||||||
|
Raises:
|
||||||
|
SynapseError: If there was a problem with this definition.
|
||||||
|
"""
|
||||||
|
# check rooms are valid room IDs
|
||||||
|
room_id_keys = ["rooms", "not_rooms"]
|
||||||
|
for key in room_id_keys:
|
||||||
|
if key in definition:
|
||||||
|
if type(definition[key]) != list:
|
||||||
|
raise SynapseError(400, "Expected %s to be a list." % key)
|
||||||
|
for room_id in definition[key]:
|
||||||
|
RoomID.from_string(room_id)
|
||||||
|
|
||||||
def _check_definition(self, definition):
|
def _check_definition(self, definition):
|
||||||
"""Check if the provided definition is valid.
|
"""Check if the provided definition is valid.
|
||||||
|
|
||||||
|
@@ -85,14 +104,7 @@ class Filtering(object):
                 400, "Expected JSON object, not %s" % (definition,)
             )
 
-        # check rooms are valid room IDs
-        room_id_keys = ["rooms", "not_rooms"]
-        for key in room_id_keys:
-            if key in definition:
-                if type(definition[key]) != list:
-                    raise SynapseError(400, "Expected %s to be a list." % key)
-                for room_id in definition[key]:
-                    RoomID.from_string(room_id)
+        self._check_definition_room_lists(definition)
 
         # check senders are valid user IDs
         user_id_keys = ["senders", "not_senders"]
@@ -119,25 +131,26 @@ class FilterCollection(object):
     def __init__(self, filter_json):
         self.filter_json = filter_json
 
-        self.room_timeline_filter = Filter(
-            self.filter_json.get("room", {}).get("timeline", {})
-        )
+        room_filter_json = self.filter_json.get("room", {})
 
-        self.room_state_filter = Filter(
-            self.filter_json.get("room", {}).get("state", {})
-        )
+        self.room_filter = Filter({
+            k: v for k, v in room_filter_json.items()
+            if k in ("rooms", "not_rooms")
+        })
 
-        self.room_ephemeral_filter = Filter(
-            self.filter_json.get("room", {}).get("ephemeral", {})
-        )
-
-        self.room_account_data = Filter(
-            self.filter_json.get("room", {}).get("account_data", {})
-        )
-
-        self.presence_filter = Filter(
-            self.filter_json.get("presence", {})
-        )
+        self.room_timeline_filter = Filter(room_filter_json.get("timeline", {}))
+        self.room_state_filter = Filter(room_filter_json.get("state", {}))
+        self.room_ephemeral_filter = Filter(room_filter_json.get("ephemeral", {}))
+        self.room_account_data = Filter(room_filter_json.get("account_data", {}))
+        self.presence_filter = Filter(self.filter_json.get("presence", {}))
+        self.account_data = Filter(self.filter_json.get("account_data", {}))
 
         self.include_leave = self.filter_json.get("room", {}).get(
             "include_leave", False
         )
 
+    def list_rooms(self):
+        return self.room_filter.list_rooms()
+
     def timeline_limit(self):
         return self.room_timeline_filter.limit()
@@ -151,23 +164,35 @@ class FilterCollection(object):
     def filter_presence(self, events):
         return self.presence_filter.filter(events)
 
+    def filter_account_data(self, events):
+        return self.account_data.filter(events)
+
     def filter_room_state(self, events):
-        return self.room_state_filter.filter(events)
+        return self.room_state_filter.filter(self.room_filter.filter(events))
 
     def filter_room_timeline(self, events):
-        return self.room_timeline_filter.filter(events)
+        return self.room_timeline_filter.filter(self.room_filter.filter(events))
 
     def filter_room_ephemeral(self, events):
-        return self.room_ephemeral_filter.filter(events)
+        return self.room_ephemeral_filter.filter(self.room_filter.filter(events))
 
     def filter_room_account_data(self, events):
-        return self.room_account_data.filter(events)
+        return self.room_account_data.filter(self.room_filter.filter(events))
 
 
 class Filter(object):
     def __init__(self, filter_json):
         self.filter_json = filter_json
 
+    def list_rooms(self):
+        """The list of room_id strings this filter restricts the output to
+        or None if this filter doesn't list the room ids.
+        """
+        if "rooms" in self.filter_json:
+            return list(set(self.filter_json["rooms"]))
+        else:
+            return None
+
     def check(self, event):
         """Checks whether the filter matches the given event.
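A minimal sketch (event shapes and room ids are illustrative) of what the
``room_filter`` composition above achieves: events from rooms outside the
filter's ``rooms`` list are dropped before the per-component
timeline/state/ephemeral filters run::

    events = [
        {"room_id": "!keep:example.org", "type": "m.room.message"},
        {"room_id": "!drop:example.org", "type": "m.room.message"},
    ]
    allowed_rooms = ["!keep:example.org"]
    print [e for e in events if e["room_id"] in allowed_rooms]
    # only the !keep:example.org event survives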
@@ -15,6 +15,8 @@
 # limitations under the License.
 
 import sys
+from synapse.rest import ClientRestResource
+
 sys.dont_write_bytecode = True
 from synapse.python_dependencies import (
     check_requirements, DEPENDENCY_LINKS, MissingRequirementError
@@ -53,15 +55,13 @@ from synapse.rest.key.v1.server_key_resource import LocalKey
 from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.api.urls import (
-    CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
-    SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
+    FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
+    SERVER_KEY_PREFIX, MEDIA_PREFIX, STATIC_PREFIX,
     SERVER_KEY_V2_PREFIX,
 )
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
 from synapse.util.logcontext import LoggingContext
-from synapse.rest.client.v1 import ClientV1RestResource
-from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
 from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
 
 from synapse import events
@@ -92,11 +92,8 @@ class SynapseHomeServer(HomeServer):
     def build_http_client(self):
         return MatrixFederationHttpClient(self)
 
-    def build_resource_for_client(self):
-        return ClientV1RestResource(self)
-
-    def build_resource_for_client_v2_alpha(self):
-        return ClientV2AlphaRestResource(self)
+    def build_client_resource(self):
+        return ClientRestResource(self)
 
     def build_resource_for_federation(self):
         return JsonResource(self)
@@ -179,16 +176,15 @@ class SynapseHomeServer(HomeServer):
         for res in listener_config["resources"]:
             for name in res["names"]:
                 if name == "client":
+                    client_resource = self.get_client_resource()
                     if res["compress"]:
-                        client_v1 = gz_wrap(self.get_resource_for_client())
-                        client_v2 = gz_wrap(self.get_resource_for_client_v2_alpha())
-                    else:
-                        client_v1 = self.get_resource_for_client()
-                        client_v2 = self.get_resource_for_client_v2_alpha()
+                        client_resource = gz_wrap(client_resource)
 
                     resources.update({
-                        CLIENT_PREFIX: client_v1,
-                        CLIENT_V2_ALPHA_PREFIX: client_v2,
+                        "/_matrix/client/api/v1": client_resource,
+                        "/_matrix/client/r0": client_resource,
+                        "/_matrix/client/unstable": client_resource,
+                        "/_matrix/client/v2_alpha": client_resource,
                     })
 
                 if name == "federation":
@@ -499,13 +495,28 @@ class SynapseRequest(Request):
         self.start_time = int(time.time() * 1000)
 
     def finished_processing(self):
+        try:
+            context = LoggingContext.current_context()
+            ru_utime, ru_stime = context.get_resource_usage()
+            db_txn_count = context.db_txn_count
+            db_txn_duration = context.db_txn_duration
+        except:
+            ru_utime, ru_stime = (0, 0)
+            db_txn_count, db_txn_duration = (0, 0)
+
         self.site.access_logger.info(
             "%s - %s - {%s}"
-            " Processed request: %dms %sB %s \"%s %s %s\" \"%s\"",
+            " Processed request: %dms (%dms, %dms) (%dms/%d)"
+            " %sB %s \"%s %s %s\" \"%s\"",
             self.getClientIP(),
             self.site.site_tag,
             self.authenticated_entity,
             int(time.time() * 1000) - self.start_time,
+            int(ru_utime * 1000),
+            int(ru_stime * 1000),
+            int(db_txn_duration * 1000),
+            int(db_txn_count),
             self.sentLength,
             self.code,
             self.method,
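A hedged reading of the expanded access-log format above; the numbers are
invented, only the placeholder order comes from the diff: wall time, then
(CPU user time, CPU system time), then (db transaction time / db transaction
count), then bytes sent and status code::

    fmt = "Processed request: %dms (%dms, %dms) (%dms/%d) %sB %s"
    print fmt % (52, 10, 3, 7, 4, 512, 200)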
@@ -133,6 +133,7 @@ class ServerConfig(Config):
         # The domain name of the server, with optional explicit port.
         # This is used by remote servers to connect to this server,
         # e.g. matrix.org, localhost:8080, etc.
+        # This is also the last part of your UserID.
         server_name: "%(server_name)s"
 
         # When running as a daemon, the file to store the pid in
@@ -230,7 +230,9 @@ class Keyring(object):
 
         missing_keys = {}
         for group in group_id_to_group.values():
-            missing_keys.setdefault(group.server_name, set()).union(group.key_ids)
+            missing_keys.setdefault(group.server_name, set()).update(
+                group.key_ids
+            )
 
         for fn in key_fetch_fns:
             results = yield fn(missing_keys.items())
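A small sketch (key names illustrative) of the bug fixed above:
``set.union`` returns a new set and leaves the stored one untouched, so the
requested key ids were silently dropped, whereas ``set.update`` mutates the
set in place::

    missing_keys = {}
    missing_keys.setdefault("example.org", set()).union({"ed25519:key1"})
    print missing_keys  # {'example.org': set([])} -- union result discarded
    missing_keys.setdefault("example.org", set()).update({"ed25519:key1"})
    print missing_keys  # {'example.org': set(['ed25519:key1'])}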
@@ -100,22 +100,20 @@ def format_event_raw(d):
 
 
 def format_event_for_client_v1(d):
-    d["user_id"] = d.pop("sender", None)
+    d = format_event_for_client_v2(d)
 
-    move_keys = (
+    sender = d.get("sender")
+    if sender is not None:
+        d["user_id"] = sender
+
+    copy_keys = (
         "age", "redacted_because", "replaces_state", "prev_content",
         "invite_room_state",
     )
-    for key in move_keys:
+    for key in copy_keys:
         if key in d["unsigned"]:
             d[key] = d["unsigned"][key]
-
-    drop_keys = (
-        "auth_events", "prev_events", "hashes", "signatures", "depth",
-        "unsigned", "origin", "prev_state"
-    )
-    for key in drop_keys:
-        d.pop(key, None)
     return d
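A hedged sketch of the v1/v2 difference handled above: v2 events keep
``sender``, and the v1 formatter now derives from the v2 form, mirroring
``sender`` into ``user_id`` and copying selected keys out of ``unsigned``.
The event content is illustrative::

    v2_event = {"sender": "@alice:example.org", "unsigned": {"age": 1234}}
    v1_event = dict(v2_event)
    v1_event["user_id"] = v1_event["sender"]
    v1_event["age"] = v1_event["unsigned"]["age"]
    print v1_event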
@@ -165,7 +165,7 @@ class BaseFederationServlet(object):
             if code is None:
                 continue
 
-            server.register_path(method, pattern, self._wrap(code))
+            server.register_paths(method, (pattern,), self._wrap(code))
 
 
 class FederationSendServlet(BaseFederationServlet):
@@ -92,7 +92,15 @@ class BaseHandler(object):
 
         membership_event = state.get((EventTypes.Member, user_id), None)
         if membership_event:
-            membership = membership_event.membership
+            was_forgotten_at_event = yield self.store.was_forgotten_at(
+                membership_event.state_key,
+                membership_event.room_id,
+                membership_event.event_id
+            )
+            if was_forgotten_at_event:
+                membership = None
+            else:
+                membership = membership_event.membership
         else:
             membership = None
@@ -29,9 +29,10 @@ class AccountDataEventSource(object):
         last_stream_id = from_key
 
         current_stream_id = yield self.store.get_max_account_data_stream_id()
-        tags = yield self.store.get_updated_tags(user_id, last_stream_id)
 
         results = []
+        tags = yield self.store.get_updated_tags(user_id, last_stream_id)
+
         for room_id, room_tags in tags.items():
             results.append({
                 "type": "m.tag",
@@ -39,6 +40,24 @@ class AccountDataEventSource(object):
                 "room_id": room_id,
             })
 
+        account_data, room_account_data = (
+            yield self.store.get_updated_account_data_for_user(user_id, last_stream_id)
+        )
+
+        for account_data_type, content in account_data.items():
+            results.append({
+                "type": account_data_type,
+                "content": content,
+            })
+
+        for room_id, account_data in room_account_data.items():
+            for account_data_type, content in account_data.items():
+                results.append({
+                    "type": account_data_type,
+                    "content": content,
+                    "room_id": room_id,
+                })
+
         defer.returnValue((results, current_stream_id))
 
     @defer.inlineCallbacks
@@ -30,34 +30,27 @@ class AdminHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_whois(self, user):
-        res = yield self.store.get_user_ip_and_agents(user)
-
-        d = {}
-        for r in res:
-            # Note that device_id is always None
-            device = d.setdefault(r["device_id"], {})
-            session = device.setdefault(r["access_token"], [])
-            session.append({
-                "ip": r["ip"],
-                "user_agent": r["user_agent"],
-                "last_seen": r["last_seen"],
-            })
+        connections = []
+
+        sessions = yield self.store.get_user_ip_and_agents(user)
+        for session in sessions:
+            connections.append({
+                "ip": session["ip"],
+                "last_seen": session["last_seen"],
+                "user_agent": session["user_agent"],
+            })
 
         ret = {
             "user_id": user.to_string(),
-            "devices": [
-                {
-                    "device_id": k,
-                    "sessions": [
-                        {
-                            # "access_token": x, TODO (erikj)
-                            "connections": y,
-                        }
-                        for x, y in v.items()
-                    ]
-                }
-                for k, v in d.items()
-            ],
+            "devices": {
+                "": {
+                    "sessions": [
+                        {
+                            "connections": connections,
+                        }
+                    ]
+                },
+            },
         }
 
         defer.returnValue(ret)
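A hedged sketch of the reshaped ``/admin/whois`` response built by
``get_whois`` above: ``devices`` becomes a dict keyed by device id (always
``""`` for now, since device ids are not tracked yet) rather than a list.
The connection values are illustrative::

    whois = {
        "user_id": "@erikj:example.org",
        "devices": {
            "": {
                "sessions": [{
                    "connections": [
                        {"ip": "192.168.0.1",
                         "last_seen": 1450000000000,
                         "user_agent": "curl/7.x"},
                    ],
                }],
            },
        },
    }
    print whois["devices"][""]["sessions"][0]["connections"]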
@@ -28,6 +28,18 @@ import random
 logger = logging.getLogger(__name__)
 
 
+def started_user_eventstream(distributor, user):
+    return distributor.fire("started_user_eventstream", user)
+
+
+def stopped_user_eventstream(distributor, user):
+    return distributor.fire("stopped_user_eventstream", user)
+
+
+def user_joined_room(distributor, user, room_id):
+    return distributor.fire("user_joined_room", user, room_id)
+
+
 class EventStreamHandler(BaseHandler):
 
     def __init__(self, hs):
@@ -57,7 +69,12 @@ class EventStreamHandler(BaseHandler):
             A deferred that completes once their presence has been updated.
         """
         if user not in self._streams_per_user:
-            self._streams_per_user[user] = 0
+            # Make sure we set the streams per user to 1 here rather than
+            # setting it to zero and incrementing the value below.
+            # Otherwise this may race with stopped_stream causing the
+            # user to be erased from the map before we have a chance
+            # to increment it.
+            self._streams_per_user[user] = 1
             if user in self._stop_timer_per_user:
                 try:
                     self.clock.cancel_call_later(
@@ -66,9 +83,9 @@ class EventStreamHandler(BaseHandler):
                 except:
                     logger.exception("Failed to cancel event timer")
             else:
-                yield self.distributor.fire("started_user_eventstream", user)
-
-        self._streams_per_user[user] += 1
+                yield started_user_eventstream(self.distributor, user)
+        else:
+            self._streams_per_user[user] += 1
 
     def stopped_stream(self, user):
         """If there are no streams for a user this starts a timer that will
@@ -89,7 +106,7 @@ class EventStreamHandler(BaseHandler):
 
             self._stop_timer_per_user.pop(user, None)
 
-            return self.distributor.fire("stopped_user_eventstream", user)
+            return stopped_user_eventstream(self.distributor, user)
 
         logger.debug("Scheduling _later: for %s", user)
         self._stop_timer_per_user[user] = (
@@ -120,9 +137,7 @@ class EventStreamHandler(BaseHandler):
             timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
 
         if is_guest:
-            yield self.distributor.fire(
-                "user_joined_room", user=auth_user, room_id=room_id
-            )
+            yield user_joined_room(self.distributor, auth_user, room_id)
 
         events, tokens = yield self.notifier.get_events_for(
             auth_user, pagin_config, timeout,
@@ -44,6 +44,10 @@ import logging
 logger = logging.getLogger(__name__)
 
 
+def user_joined_room(distributor, user, room_id):
+    return distributor.fire("user_joined_room", user, room_id)
+
+
 class FederationHandler(BaseHandler):
     """Handles events that originated from federation.
     Responsible for:
@@ -60,10 +64,7 @@ class FederationHandler(BaseHandler):
 
         self.hs = hs
 
-        self.distributor.observe(
-            "user_joined_room",
-            self._on_user_joined
-        )
+        self.distributor.observe("user_joined_room", self.user_joined_room)
 
         self.waiting_for_join_list = {}
 
@@ -176,7 +177,7 @@ class FederationHandler(BaseHandler):
         )
 
         try:
-            _, event_stream_id, max_stream_id = yield self._handle_new_event(
+            context, event_stream_id, max_stream_id = yield self._handle_new_event(
                 origin,
                 event,
                 state=state,
@@ -233,10 +234,13 @@ class FederationHandler(BaseHandler):
 
         if event.type == EventTypes.Member:
             if event.membership == Membership.JOIN:
-                user = UserID.from_string(event.state_key)
-                yield self.distributor.fire(
-                    "user_joined_room", user=user, room_id=event.room_id
-                )
+                prev_state = context.current_state.get((event.type, event.state_key))
+                if not prev_state or prev_state.membership != Membership.JOIN:
+                    # Only fire user_joined_room if the user has actually
+                    # joined the room. Don't bother if the user is just
+                    # changing their profile info.
+                    user = UserID.from_string(event.state_key)
+                    yield user_joined_room(self.distributor, user, event.room_id)
 
     @defer.inlineCallbacks
     def _filter_events_for_server(self, server_name, room_id, events):
@@ -592,7 +596,7 @@ class FederationHandler(BaseHandler):
         handled_events = set()
 
         try:
-            new_event = self._sign_event(event)
+            event = self._sign_event(event)
             # Try the host we successfully got a response to /make_join/
             # request first.
             try:
@@ -600,7 +604,7 @@ class FederationHandler(BaseHandler):
                 target_hosts.insert(0, origin)
             except ValueError:
                 pass
-            ret = yield self.replication_layer.send_join(target_hosts, new_event)
+            ret = yield self.replication_layer.send_join(target_hosts, event)
 
             origin = ret["origin"]
             state = ret["state"]
@@ -609,12 +613,12 @@ class FederationHandler(BaseHandler):
 
             handled_events.update([s.event_id for s in state])
             handled_events.update([a.event_id for a in auth_chain])
-            handled_events.add(new_event.event_id)
+            handled_events.add(event.event_id)
 
             logger.debug("do_invite_join auth_chain: %s", auth_chain)
             logger.debug("do_invite_join state: %s", state)
 
-            logger.debug("do_invite_join event: %s", new_event)
+            logger.debug("do_invite_join event: %s", event)
 
             try:
                 yield self.store.store_room(
@@ -632,14 +636,14 @@ class FederationHandler(BaseHandler):
 
             with PreserveLoggingContext():
                 d = self.notifier.on_new_room_event(
-                    new_event, event_stream_id, max_stream_id,
+                    event, event_stream_id, max_stream_id,
                     extra_users=[joinee]
                 )
 
                 def log_failure(f):
                     logger.warn(
                         "Failed to notify about %s: %s",
-                        new_event.event_id, f.value
+                        event.event_id, f.value
                     )
 
                 d.addErrback(log_failure)
@@ -733,9 +737,7 @@ class FederationHandler(BaseHandler):
         if event.type == EventTypes.Member:
             if event.content["membership"] == Membership.JOIN:
                 user = UserID.from_string(event.state_key)
-                yield self.distributor.fire(
-                    "user_joined_room", user=user, room_id=event.room_id
-                )
+                yield user_joined_room(self.distributor, user, event.room_id)
 
         new_pdu = event
 
@@ -1082,7 +1084,7 @@ class FederationHandler(BaseHandler):
         return self.store.get_min_depth(context)
 
     @log_function
-    def _on_user_joined(self, user, room_id):
+    def user_joined_room(self, user, room_id):
         waiters = self.waiting_for_join_list.get(
             (user.to_string(), room_id),
             []
@@ -1648,11 +1650,22 @@ class FederationHandler(BaseHandler):
         sender = invite["sender"]
         room_id = invite["room_id"]
 
+        if "signed" not in invite or "token" not in invite["signed"]:
+            logger.info(
+                "Discarding received notification of third party invite "
+                "without signed: %s" % (invite,)
+            )
+            return
+
+        third_party_invite = {
+            "signed": invite["signed"],
+        }
+
         event_dict = {
             "type": EventTypes.Member,
             "content": {
                 "membership": Membership.INVITE,
-                "third_party_invite": invite,
+                "third_party_invite": third_party_invite,
             },
             "room_id": room_id,
             "sender": sender,
@@ -1663,6 +1676,11 @@ class FederationHandler(BaseHandler):
         builder = self.event_builder_factory.new(event_dict)
         EventValidator().validate_new(builder)
         event, context = yield self._create_new_client_event(builder=builder)
+
+        event, context = yield self.add_display_name_to_third_party_invite(
+            event_dict, event, context
+        )
+
         self.auth.check(event, context.current_state)
         yield self._validate_keyserver(event, auth_events=context.current_state)
         member_handler = self.hs.get_handlers().room_member_handler
@@ -1684,6 +1702,10 @@ class FederationHandler(BaseHandler):
             builder=builder,
         )
 
+        event, context = yield self.add_display_name_to_third_party_invite(
+            event_dict, event, context
+        )
+
         self.auth.check(event, auth_events=context.current_state)
         yield self._validate_keyserver(event, auth_events=context.current_state)
 
@@ -1693,6 +1715,27 @@ class FederationHandler(BaseHandler):
         member_handler = self.hs.get_handlers().room_member_handler
         yield member_handler.change_membership(event, context)
 
+    @defer.inlineCallbacks
+    def add_display_name_to_third_party_invite(self, event_dict, event, context):
+        key = (
+            EventTypes.ThirdPartyInvite,
+            event.content["third_party_invite"]["signed"]["token"]
+        )
+        original_invite = context.current_state.get(key)
+        if not original_invite:
+            logger.info(
+                "Could not find invite event for third_party_invite - "
+                "discarding: %s" % (event_dict,)
+            )
+            return
+
+        display_name = original_invite.content["display_name"]
+        event_dict["content"]["third_party_invite"]["display_name"] = display_name
+        builder = self.event_builder_factory.new(event_dict)
+        EventValidator().validate_new(builder)
+        event, context = yield self._create_new_client_event(builder=builder)
+        defer.returnValue((event, context))
+
     @defer.inlineCallbacks
     def _validate_keyserver(self, event, auth_events):
         token = event.content["third_party_invite"]["signed"]["token"]
@@ -20,7 +20,6 @@ from synapse.api.errors import (
     CodeMessageException
 )
 from ._base import BaseHandler
-from synapse.http.client import SimpleHttpClient
 from synapse.util.async import run_on_reactor
 from synapse.api.errors import SynapseError
 
@@ -35,13 +34,12 @@ class IdentityHandler(BaseHandler):
     def __init__(self, hs):
         super(IdentityHandler, self).__init__(hs)
 
+        self.http_client = hs.get_simple_http_client()
+
     @defer.inlineCallbacks
     def threepid_from_creds(self, creds):
         yield run_on_reactor()
 
-        # TODO: get this from the homeserver rather than creating a new one for
-        # each request
-        http_client = SimpleHttpClient(self.hs)
         # XXX: make this configurable!
         # trustedIdServers = ['matrix.org', 'localhost:8090']
         trustedIdServers = ['matrix.org', 'vector.im']
@@ -67,7 +65,7 @@ class IdentityHandler(BaseHandler):
 
         data = {}
         try:
-            data = yield http_client.get_json(
+            data = yield self.http_client.get_json(
                 "https://%s%s" % (
                     id_server,
                     "/_matrix/identity/api/v1/3pid/getValidated3pid"
@@ -85,7 +83,6 @@ class IdentityHandler(BaseHandler):
     def bind_threepid(self, creds, mxid):
         yield run_on_reactor()
         logger.debug("binding threepid %r to %s", creds, mxid)
-        http_client = SimpleHttpClient(self.hs)
         data = None
 
         if 'id_server' in creds:
@@ -103,7 +100,7 @@ class IdentityHandler(BaseHandler):
             raise SynapseError(400, "No client_secret in creds")
 
         try:
-            data = yield http_client.post_urlencoded_get_json(
+            data = yield self.http_client.post_urlencoded_get_json(
                 "https://%s%s" % (
                     id_server, "/_matrix/identity/api/v1/3pid/bind"
                 ),
@@ -121,7 +118,6 @@ class IdentityHandler(BaseHandler):
     @defer.inlineCallbacks
     def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs):
         yield run_on_reactor()
-        http_client = SimpleHttpClient(self.hs)
 
         params = {
             'email': email,
@@ -131,7 +127,7 @@ class IdentityHandler(BaseHandler):
         params.update(kwargs)
 
         try:
-            data = yield http_client.post_urlencoded_get_json(
+            data = yield self.http_client.post_urlencoded_get_json(
                 "https://%s%s" % (
                     id_server,
                     "/_matrix/identity/api/v1/validate/email/requestToken"
@ -22,15 +22,22 @@ from synapse.events.utils import serialize_event
|
||||||
from synapse.events.validator import EventValidator
|
from synapse.events.validator import EventValidator
|
||||||
from synapse.util import unwrapFirstError
|
from synapse.util import unwrapFirstError
|
||||||
from synapse.util.logcontext import PreserveLoggingContext
|
from synapse.util.logcontext import PreserveLoggingContext
|
||||||
|
from synapse.util.caches.snapshot_cache import SnapshotCache
|
||||||
from synapse.types import UserID, RoomStreamToken, StreamToken
|
from synapse.types import UserID, RoomStreamToken, StreamToken
|
||||||
|
|
||||||
from ._base import BaseHandler
|
from ._base import BaseHandler
|
||||||
|
|
||||||
|
from canonicaljson import encode_canonical_json
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def collect_presencelike_data(distributor, user, content):
|
||||||
|
return distributor.fire("collect_presencelike_data", user, content)
|
||||||
|
|
||||||
|
|
||||||
class MessageHandler(BaseHandler):
|
class MessageHandler(BaseHandler):
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
@ -39,6 +46,7 @@ class MessageHandler(BaseHandler):
|
||||||
self.state = hs.get_state_handler()
|
self.state = hs.get_state_handler()
|
||||||
self.clock = hs.get_clock()
|
self.clock = hs.get_clock()
|
||||||
self.validator = EventValidator()
|
self.validator = EventValidator()
|
||||||
|
self.snapshot_cache = SnapshotCache()
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_message(self, msg_id=None, room_id=None, sender_id=None,
|
def get_message(self, msg_id=None, room_id=None, sender_id=None,
|
||||||
|
@ -195,10 +203,8 @@ class MessageHandler(BaseHandler):
|
||||||
if membership == Membership.JOIN:
|
if membership == Membership.JOIN:
|
||||||
joinee = UserID.from_string(builder.state_key)
|
joinee = UserID.from_string(builder.state_key)
|
||||||
# If event doesn't include a display name, add one.
|
# If event doesn't include a display name, add one.
|
||||||
yield self.distributor.fire(
|
yield collect_presencelike_data(
|
||||||
"collect_presencelike_data",
|
self.distributor, joinee, builder.content
|
||||||
joinee,
|
|
||||||
builder.content
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if token_id is not None:
|
if token_id is not None:
|
||||||
|
@ -211,6 +217,16 @@ class MessageHandler(BaseHandler):
|
||||||
builder=builder,
|
builder=builder,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if event.is_state():
|
||||||
|
prev_state = context.current_state.get((event.type, event.state_key))
|
||||||
|
if prev_state and event.user_id == prev_state.user_id:
|
||||||
|
prev_content = encode_canonical_json(prev_state.content)
|
||||||
|
next_content = encode_canonical_json(event.content)
|
||||||
|
if prev_content == next_content:
|
||||||
|
# Duplicate suppression for state updates with same sender
|
||||||
|
# and content.
|
||||||
|
defer.returnValue(prev_state)
|
||||||
|
|
||||||
if event.type == EventTypes.Member:
|
if event.type == EventTypes.Member:
|
||||||
member_handler = self.hs.get_handlers().room_member_handler
|
member_handler = self.hs.get_handlers().room_member_handler
|
||||||
yield member_handler.change_membership(event, context, is_guest=is_guest)
|
yield member_handler.change_membership(event, context, is_guest=is_guest)
|
||||||
|
@ -312,7 +328,6 @@ class MessageHandler(BaseHandler):
|
||||||
[serialize_event(c, now) for c in room_state.values()]
|
[serialize_event(c, now) for c in room_state.values()]
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def snapshot_all_rooms(self, user_id=None, pagin_config=None,
|
def snapshot_all_rooms(self, user_id=None, pagin_config=None,
|
||||||
as_client_event=True, include_archived=False):
|
as_client_event=True, include_archived=False):
|
||||||
"""Retrieve a snapshot of all rooms the user is invited or has joined.
|
"""Retrieve a snapshot of all rooms the user is invited or has joined.
|
||||||
|
@ -332,6 +347,28 @@ class MessageHandler(BaseHandler):
|
||||||
is joined on, may return a "messages" key with messages, depending
|
is joined on, may return a "messages" key with messages, depending
|
||||||
on the specified PaginationConfig.
|
on the specified PaginationConfig.
|
||||||
"""
|
"""
|
||||||
|
key = (
|
||||||
|
user_id,
|
||||||
|
pagin_config.from_token,
|
||||||
|
pagin_config.to_token,
|
||||||
|
pagin_config.direction,
|
||||||
|
pagin_config.limit,
|
||||||
|
as_client_event,
|
||||||
|
include_archived,
|
||||||
|
)
|
||||||
|
now_ms = self.clock.time_msec()
|
||||||
|
result = self.snapshot_cache.get(now_ms, key)
|
||||||
|
if result is not None:
|
||||||
|
return result
|
||||||
|
|
||||||
|
return self.snapshot_cache.set(now_ms, key, self._snapshot_all_rooms(
|
||||||
|
user_id, pagin_config, as_client_event, include_archived
|
||||||
|
))
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _snapshot_all_rooms(self, user_id=None, pagin_config=None,
|
||||||
|
as_client_event=True, include_archived=False):
|
||||||
|
|
||||||
memberships = [Membership.INVITE, Membership.JOIN]
|
memberships = [Membership.INVITE, Membership.JOIN]
|
||||||
if include_archived:
|
if include_archived:
|
||||||
memberships.append(Membership.LEAVE)
|
memberships.append(Membership.LEAVE)
|
||||||
|
@ -359,6 +396,10 @@ class MessageHandler(BaseHandler):
|
||||||
|
|
||||||
tags_by_room = yield self.store.get_tags_for_user(user_id)
|
tags_by_room = yield self.store.get_tags_for_user(user_id)
|
||||||
|
|
||||||
|
account_data, account_data_by_room = (
|
||||||
|
yield self.store.get_account_data_for_user(user_id)
|
||||||
|
)
|
||||||
|
|
||||||
public_room_ids = yield self.store.get_public_room_ids()
|
public_room_ids = yield self.store.get_public_room_ids()
|
||||||
|
|
||||||
limit = pagin_config.limit
|
limit = pagin_config.limit
|
||||||
|
@ -436,14 +477,22 @@ class MessageHandler(BaseHandler):
|
||||||
for c in current_state.values()
|
for c in current_state.values()
|
||||||
]
|
]
|
||||||
|
|
||||||
account_data = []
|
account_data_events = []
|
||||||
tags = tags_by_room.get(event.room_id)
|
tags = tags_by_room.get(event.room_id)
|
||||||
if tags:
|
if tags:
|
||||||
account_data.append({
|
account_data_events.append({
|
||||||
"type": "m.tag",
|
"type": "m.tag",
|
||||||
"content": {"tags": tags},
|
"content": {"tags": tags},
|
||||||
})
|
})
|
||||||
d["account_data"] = account_data
|
|
||||||
|
account_data = account_data_by_room.get(event.room_id, {})
|
||||||
|
for account_data_type, content in account_data.items():
|
||||||
|
account_data_events.append({
|
||||||
|
"type": account_data_type,
|
||||||
|
"content": content,
|
||||||
|
})
|
||||||
|
|
||||||
|
d["account_data"] = account_data_events
|
||||||
except:
|
except:
|
||||||
logger.exception("Failed to get snapshot")
|
logger.exception("Failed to get snapshot")
|
||||||
|
|
||||||
|
@ -456,9 +505,17 @@ class MessageHandler(BaseHandler):
|
||||||
consumeErrors=True
|
consumeErrors=True
|
||||||
).addErrback(unwrapFirstError)
|
).addErrback(unwrapFirstError)
|
||||||
|
|
||||||
|
account_data_events = []
|
||||||
|
for account_data_type, content in account_data.items():
|
||||||
|
account_data_events.append({
|
||||||
|
"type": account_data_type,
|
||||||
|
"content": content,
|
||||||
|
})
|
||||||
|
|
||||||
ret = {
|
ret = {
|
||||||
"rooms": rooms_ret,
|
"rooms": rooms_ret,
|
||||||
"presence": presence,
|
"presence": presence,
|
||||||
|
"account_data": account_data_events,
|
||||||
"receipts": receipt,
|
"receipts": receipt,
|
||||||
"end": now_token.to_string(),
|
"end": now_token.to_string(),
|
||||||
}
|
}
|
||||||
|
@ -498,14 +555,22 @@ class MessageHandler(BaseHandler):
|
||||||
user_id, room_id, pagin_config, membership, member_event_id, is_guest
|
user_id, room_id, pagin_config, membership, member_event_id, is_guest
|
||||||
)
|
)
|
||||||
|
|
||||||
account_data = []
|
account_data_events = []
|
||||||
tags = yield self.store.get_tags_for_room(user_id, room_id)
|
tags = yield self.store.get_tags_for_room(user_id, room_id)
|
||||||
if tags:
|
if tags:
|
||||||
account_data.append({
|
account_data_events.append({
|
||||||
"type": "m.tag",
|
"type": "m.tag",
|
||||||
"content": {"tags": tags},
|
"content": {"tags": tags},
|
||||||
})
|
})
|
||||||
result["account_data"] = account_data
|
|
||||||
|
account_data = yield self.store.get_account_data_for_room(user_id, room_id)
|
||||||
|
for account_data_type, content in account_data.items():
|
||||||
|
account_data_events.append({
|
||||||
|
"type": account_data_type,
|
||||||
|
"content": content,
|
||||||
|
})
|
||||||
|
|
||||||
|
result["account_data"] = account_data_events
|
||||||
|
|
||||||
defer.returnValue(result)
|
defer.returnValue(result)
|
||||||
|
|
||||||
|
@ -588,23 +653,28 @@ class MessageHandler(BaseHandler):
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_presence():
|
def get_presence():
|
||||||
states = {}
|
states = yield presence_handler.get_states(
|
||||||
if not is_guest:
|
target_users=[UserID.from_string(m.user_id) for m in room_members],
|
||||||
states = yield presence_handler.get_states(
|
auth_user=auth_user,
|
||||||
target_users=[UserID.from_string(m.user_id) for m in room_members],
|
as_event=True,
|
||||||
auth_user=auth_user,
|
check_auth=False,
|
||||||
as_event=True,
|
)
|
||||||
check_auth=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
defer.returnValue(states.values())
|
defer.returnValue(states.values())
|
||||||
|
|
||||||
receipts_handler = self.hs.get_handlers().receipts_handler
|
@defer.inlineCallbacks
|
||||||
|
def get_receipts():
|
||||||
|
receipts_handler = self.hs.get_handlers().receipts_handler
|
||||||
|
receipts = yield receipts_handler.get_receipts_for_room(
|
||||||
|
room_id,
|
||||||
|
now_token.receipt_key
|
||||||
|
)
|
||||||
|
defer.returnValue(receipts)
|
||||||
|
|
||||||
presence, receipts, (messages, token) = yield defer.gatherResults(
|
presence, receipts, (messages, token) = yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
get_presence(),
|
get_presence(),
|
||||||
receipts_handler.get_receipts_for_room(room_id, now_token.receipt_key),
|
get_receipts(),
|
||||||
self.store.get_recent_events_for_room(
|
self.store.get_recent_events_for_room(
|
||||||
room_id,
|
room_id,
|
||||||
limit=limit,
|
limit=limit,
|
||||||
|
|
|
@ -62,6 +62,14 @@ def partitionbool(l, func):
|
||||||
return ret.get(True, []), ret.get(False, [])
|
return ret.get(True, []), ret.get(False, [])
|
||||||
|
|
||||||
|
|
||||||
|
def user_presence_changed(distributor, user, statuscache):
|
||||||
|
return distributor.fire("user_presence_changed", user, statuscache)
|
||||||
|
|
||||||
|
|
||||||
|
def collect_presencelike_data(distributor, user, content):
|
||||||
|
return distributor.fire("collect_presencelike_data", user, content)
|
||||||
|
|
||||||
|
|
||||||
class PresenceHandler(BaseHandler):
|
class PresenceHandler(BaseHandler):
|
||||||
|
|
||||||
STATE_LEVELS = {
|
STATE_LEVELS = {
|
||||||
|
@ -361,9 +369,7 @@ class PresenceHandler(BaseHandler):
|
||||||
yield self.store.set_presence_state(
|
yield self.store.set_presence_state(
|
||||||
target_user.localpart, state_to_store
|
target_user.localpart, state_to_store
|
||||||
)
|
)
|
||||||
yield self.distributor.fire(
|
yield collect_presencelike_data(self.distributor, target_user, state)
|
||||||
"collect_presencelike_data", target_user, state
|
|
||||||
)
|
|
||||||
|
|
||||||
if now_level > was_level:
|
if now_level > was_level:
|
||||||
state["last_active"] = self.clock.time_msec()
|
state["last_active"] = self.clock.time_msec()
|
||||||
|
@ -467,7 +473,7 @@ class PresenceHandler(BaseHandler):
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def send_invite(self, observer_user, observed_user):
|
def send_presence_invite(self, observer_user, observed_user):
|
||||||
"""Request the presence of a local or remote user for a local user"""
|
"""Request the presence of a local or remote user for a local user"""
|
||||||
if not self.hs.is_mine(observer_user):
|
if not self.hs.is_mine(observer_user):
|
||||||
raise SynapseError(400, "User is not hosted on this Home Server")
|
raise SynapseError(400, "User is not hosted on this Home Server")
|
||||||
|
@ -878,7 +884,7 @@ class PresenceHandler(BaseHandler):
|
||||||
room_ids=room_ids,
|
room_ids=room_ids,
|
||||||
statuscache=statuscache,
|
statuscache=statuscache,
|
||||||
)
|
)
|
||||||
yield self.distributor.fire("user_presence_changed", user, statuscache)
|
yield user_presence_changed(self.distributor, user, statuscache)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def incoming_presence(self, origin, content):
|
def incoming_presence(self, origin, content):
|
||||||
|
@ -1116,9 +1122,7 @@ class PresenceHandler(BaseHandler):
|
||||||
self._user_cachemap[user].get_state()["last_active"]
|
self._user_cachemap[user].get_state()["last_active"]
|
||||||
)
|
)
|
||||||
|
|
||||||
yield self.distributor.fire(
|
yield collect_presencelike_data(self.distributor, user, state)
|
||||||
"collect_presencelike_data", user, state
|
|
||||||
)
|
|
||||||
|
|
||||||
if "last_active" in state:
|
if "last_active" in state:
|
||||||
state = dict(state)
|
state = dict(state)
|
||||||
|
|
|
@ -28,6 +28,14 @@ import logging
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def changed_presencelike_data(distributor, user, state):
|
||||||
|
return distributor.fire("changed_presencelike_data", user, state)
|
||||||
|
|
||||||
|
|
||||||
|
def collect_presencelike_data(distributor, user, content):
|
||||||
|
return distributor.fire("collect_presencelike_data", user, content)
|
||||||
|
|
||||||
|
|
||||||
class ProfileHandler(BaseHandler):
|
class ProfileHandler(BaseHandler):
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
@ -95,11 +103,9 @@ class ProfileHandler(BaseHandler):
|
||||||
target_user.localpart, new_displayname
|
target_user.localpart, new_displayname
|
||||||
)
|
)
|
||||||
|
|
||||||
yield self.distributor.fire(
|
yield changed_presencelike_data(self.distributor, target_user, {
|
||||||
"changed_presencelike_data", target_user, {
|
"displayname": new_displayname,
|
||||||
"displayname": new_displayname,
|
})
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
yield self._update_join_states(target_user)
|
yield self._update_join_states(target_user)
|
||||||
|
|
||||||
|
@ -144,11 +150,9 @@ class ProfileHandler(BaseHandler):
|
||||||
target_user.localpart, new_avatar_url
|
target_user.localpart, new_avatar_url
|
||||||
)
|
)
|
||||||
|
|
||||||
yield self.distributor.fire(
|
yield changed_presencelike_data(self.distributor, target_user, {
|
||||||
"changed_presencelike_data", target_user, {
|
"avatar_url": new_avatar_url,
|
||||||
"avatar_url": new_avatar_url,
|
})
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
yield self._update_join_states(target_user)
|
yield self._update_join_states(target_user)
|
||||||
|
|
||||||
|
@ -208,9 +212,7 @@ class ProfileHandler(BaseHandler):
|
||||||
"membership": Membership.JOIN,
|
"membership": Membership.JOIN,
|
||||||
}
|
}
|
||||||
|
|
||||||
yield self.distributor.fire(
|
yield collect_presencelike_data(self.distributor, user, content)
|
||||||
"collect_presencelike_data", user, content
|
|
||||||
)
|
|
||||||
|
|
||||||
msg_handler = self.hs.get_handlers().message_handler
|
msg_handler = self.hs.get_handlers().message_handler
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -31,6 +31,10 @@ import urllib
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def registered_user(distributor, user):
|
||||||
|
return distributor.fire("registered_user", user)
|
||||||
|
|
||||||
|
|
||||||
class RegistrationHandler(BaseHandler):
|
class RegistrationHandler(BaseHandler):
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
@ -38,6 +42,7 @@ class RegistrationHandler(BaseHandler):
|
||||||
|
|
||||||
self.distributor = hs.get_distributor()
|
self.distributor = hs.get_distributor()
|
||||||
self.distributor.declare("registered_user")
|
self.distributor.declare("registered_user")
|
||||||
|
self.captcha_client = CaptchaServerHttpClient(hs)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_username(self, localpart):
|
def check_username(self, localpart):
|
||||||
|
@ -98,7 +103,7 @@ class RegistrationHandler(BaseHandler):
|
||||||
password_hash=password_hash
|
password_hash=password_hash
|
||||||
)
|
)
|
||||||
|
|
||||||
yield self.distributor.fire("registered_user", user)
|
yield registered_user(self.distributor, user)
|
||||||
else:
|
else:
|
||||||
# autogen a random user ID
|
# autogen a random user ID
|
||||||
attempts = 0
|
attempts = 0
|
||||||
|
@ -117,7 +122,7 @@ class RegistrationHandler(BaseHandler):
|
||||||
token=token,
|
token=token,
|
||||||
password_hash=password_hash)
|
password_hash=password_hash)
|
||||||
|
|
||||||
self.distributor.fire("registered_user", user)
|
yield registered_user(self.distributor, user)
|
||||||
except SynapseError:
|
except SynapseError:
|
||||||
# if user id is taken, just generate another
|
# if user id is taken, just generate another
|
||||||
user_id = None
|
user_id = None
|
||||||
|
@ -127,25 +132,9 @@ class RegistrationHandler(BaseHandler):
|
||||||
raise RegistrationError(
|
raise RegistrationError(
|
||||||
500, "Cannot generate user ID.")
|
500, "Cannot generate user ID.")
|
||||||
|
|
||||||
# create a default avatar for the user
|
# We used to generate default identicons here, but nowadays
|
||||||
# XXX: ideally clients would explicitly specify one, but given they don't
|
# we want clients to generate their own as part of their branding
|
||||||
# and we want consistent and pretty identicons for random users, we'll
|
# rather than there being consistent matrix-wide ones, so we don't.
|
||||||
# do it here.
|
|
||||||
try:
|
|
||||||
auth_user = UserID.from_string(user_id)
|
|
||||||
media_repository = self.hs.get_resource_for_media_repository()
|
|
||||||
identicon_resource = media_repository.getChildWithDefault("identicon", None)
|
|
||||||
upload_resource = media_repository.getChildWithDefault("upload", None)
|
|
||||||
identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
|
|
||||||
content_uri = yield upload_resource.create_content(
|
|
||||||
"image/png", None, identicon_bytes, len(identicon_bytes), auth_user
|
|
||||||
)
|
|
||||||
profile_handler = self.hs.get_handlers().profile_handler
|
|
||||||
profile_handler.set_avatar_url(
|
|
||||||
auth_user, auth_user, ("%s#auto" % (content_uri,))
|
|
||||||
)
|
|
||||||
except NotImplementedError:
|
|
||||||
pass # make tests pass without messing around creating default avatars
|
|
||||||
|
|
||||||
defer.returnValue((user_id, token))
|
defer.returnValue((user_id, token))
|
||||||
|
|
||||||
|
@ -167,7 +156,7 @@ class RegistrationHandler(BaseHandler):
|
||||||
token=token,
|
token=token,
|
||||||
password_hash=""
|
password_hash=""
|
||||||
)
|
)
|
||||||
self.distributor.fire("registered_user", user)
|
registered_user(self.distributor, user)
|
||||||
defer.returnValue((user_id, token))
|
defer.returnValue((user_id, token))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
|
@ -215,7 +204,7 @@ class RegistrationHandler(BaseHandler):
|
||||||
token=token,
|
token=token,
|
||||||
password_hash=None
|
password_hash=None
|
||||||
)
|
)
|
||||||
yield self.distributor.fire("registered_user", user)
|
yield registered_user(self.distributor, user)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
yield self.store.add_access_token_to_user(user_id, token)
|
yield self.store.add_access_token_to_user(user_id, token)
|
||||||
# Ignore Registration errors
|
# Ignore Registration errors
|
||||||
|
@ -302,10 +291,7 @@ class RegistrationHandler(BaseHandler):
|
||||||
"""
|
"""
|
||||||
Used only by c/s api v1
|
Used only by c/s api v1
|
||||||
"""
|
"""
|
||||||
# TODO: get this from the homeserver rather than creating a new one for
|
data = yield self.captcha_client.post_urlencoded_get_raw(
|
||||||
# each request
|
|
||||||
client = CaptchaServerHttpClient(self.hs)
|
|
||||||
data = yield client.post_urlencoded_get_raw(
|
|
||||||
"http://www.google.com:80/recaptcha/api/verify",
|
"http://www.google.com:80/recaptcha/api/verify",
|
||||||
args={
|
args={
|
||||||
'privatekey': private_key,
|
'privatekey': private_key,
|
||||||
|
|
|
@ -41,6 +41,18 @@ logger = logging.getLogger(__name__)
|
||||||
id_server_scheme = "https://"
|
id_server_scheme = "https://"
|
||||||
|
|
||||||
|
|
||||||
|
def collect_presencelike_data(distributor, user, content):
|
||||||
|
return distributor.fire("collect_presencelike_data", user, content)
|
||||||
|
|
||||||
|
|
||||||
|
def user_left_room(distributor, user, room_id):
|
||||||
|
return distributor.fire("user_left_room", user=user, room_id=room_id)
|
||||||
|
|
||||||
|
|
||||||
|
def user_joined_room(distributor, user, room_id):
|
||||||
|
return distributor.fire("user_joined_room", user=user, room_id=room_id)
|
||||||
|
|
||||||
|
|
||||||
class RoomCreationHandler(BaseHandler):
|
class RoomCreationHandler(BaseHandler):
|
||||||
|
|
||||||
PRESETS_DICT = {
|
PRESETS_DICT = {
|
||||||
|
@ -438,9 +450,7 @@ class RoomMemberHandler(BaseHandler):
|
||||||
|
|
||||||
if prev_state and prev_state.membership == Membership.JOIN:
|
if prev_state and prev_state.membership == Membership.JOIN:
|
||||||
user = UserID.from_string(event.user_id)
|
user = UserID.from_string(event.user_id)
|
||||||
self.distributor.fire(
|
user_left_room(self.distributor, user, event.room_id)
|
||||||
"user_left_room", user=user, room_id=event.room_id
|
|
||||||
)
|
|
||||||
|
|
||||||
defer.returnValue({"room_id": room_id})
|
defer.returnValue({"room_id": room_id})
|
||||||
|
|
||||||
|
@ -458,9 +468,7 @@ class RoomMemberHandler(BaseHandler):
|
||||||
raise SynapseError(404, "No known servers")
|
raise SynapseError(404, "No known servers")
|
||||||
|
|
||||||
# If event doesn't include a display name, add one.
|
# If event doesn't include a display name, add one.
|
||||||
yield self.distributor.fire(
|
yield collect_presencelike_data(self.distributor, joinee, content)
|
||||||
"collect_presencelike_data", joinee, content
|
|
||||||
)
|
|
||||||
|
|
||||||
content.update({"membership": Membership.JOIN})
|
content.update({"membership": Membership.JOIN})
|
||||||
builder = self.event_builder_factory.new({
|
builder = self.event_builder_factory.new({
|
||||||
|
@ -517,10 +525,13 @@ class RoomMemberHandler(BaseHandler):
|
||||||
do_auth=do_auth,
|
do_auth=do_auth,
|
||||||
)
|
)
|
||||||
|
|
||||||
user = UserID.from_string(event.user_id)
|
prev_state = context.current_state.get((event.type, event.state_key))
|
||||||
yield self.distributor.fire(
|
if not prev_state or prev_state.membership != Membership.JOIN:
|
||||||
"user_joined_room", user=user, room_id=room_id
|
# Only fire user_joined_room if the user has acutally joined the
|
||||||
)
|
# room. Don't bother if the user is just changing their profile
|
||||||
|
# info.
|
||||||
|
user = UserID.from_string(event.user_id)
|
||||||
|
yield user_joined_room(self.distributor, user, room_id)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_inviter(self, event):
|
def get_inviter(self, event):
|
||||||
|
@ -693,13 +704,48 @@ class RoomMemberHandler(BaseHandler):
|
||||||
token_id,
|
token_id,
|
||||||
txn_id
|
txn_id
|
||||||
):
|
):
|
||||||
|
room_state = yield self.hs.get_state_handler().get_current_state(room_id)
|
||||||
|
|
||||||
|
inviter_display_name = ""
|
||||||
|
inviter_avatar_url = ""
|
||||||
|
member_event = room_state.get((EventTypes.Member, user.to_string()))
|
||||||
|
if member_event:
|
||||||
|
inviter_display_name = member_event.content.get("displayname", "")
|
||||||
|
inviter_avatar_url = member_event.content.get("avatar_url", "")
|
||||||
|
|
||||||
|
canonical_room_alias = ""
|
||||||
|
canonical_alias_event = room_state.get((EventTypes.CanonicalAlias, ""))
|
||||||
|
if canonical_alias_event:
|
||||||
|
canonical_room_alias = canonical_alias_event.content.get("alias", "")
|
||||||
|
|
||||||
|
room_name = ""
|
||||||
|
room_name_event = room_state.get((EventTypes.Name, ""))
|
||||||
|
if room_name_event:
|
||||||
|
room_name = room_name_event.content.get("name", "")
|
||||||
|
|
||||||
|
room_join_rules = ""
|
||||||
|
join_rules_event = room_state.get((EventTypes.JoinRules, ""))
|
||||||
|
if join_rules_event:
|
||||||
|
room_join_rules = join_rules_event.content.get("join_rule", "")
|
||||||
|
|
||||||
|
room_avatar_url = ""
|
||||||
|
room_avatar_event = room_state.get((EventTypes.RoomAvatar, ""))
|
||||||
|
if room_avatar_event:
|
||||||
|
room_avatar_url = room_avatar_event.content.get("url", "")
|
||||||
|
|
||||||
token, public_key, key_validity_url, display_name = (
|
token, public_key, key_validity_url, display_name = (
|
||||||
yield self._ask_id_server_for_third_party_invite(
|
yield self._ask_id_server_for_third_party_invite(
|
||||||
id_server,
|
id_server=id_server,
|
||||||
medium,
|
medium=medium,
|
||||||
address,
|
address=address,
|
||||||
room_id,
|
room_id=room_id,
|
||||||
user.to_string()
|
inviter_user_id=user.to_string(),
|
||||||
|
room_alias=canonical_room_alias,
|
||||||
|
room_avatar_url=room_avatar_url,
|
||||||
|
room_join_rules=room_join_rules,
|
||||||
|
room_name=room_name,
|
||||||
|
inviter_display_name=inviter_display_name,
|
||||||
|
inviter_avatar_url=inviter_avatar_url
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
msg_handler = self.hs.get_handlers().message_handler
|
msg_handler = self.hs.get_handlers().message_handler
|
||||||
|
@ -721,7 +767,19 @@ class RoomMemberHandler(BaseHandler):
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _ask_id_server_for_third_party_invite(
|
def _ask_id_server_for_third_party_invite(
|
||||||
self, id_server, medium, address, room_id, sender):
|
self,
|
||||||
|
id_server,
|
||||||
|
medium,
|
||||||
|
address,
|
||||||
|
room_id,
|
||||||
|
inviter_user_id,
|
||||||
|
room_alias,
|
||||||
|
room_avatar_url,
|
||||||
|
room_join_rules,
|
||||||
|
room_name,
|
||||||
|
inviter_display_name,
|
||||||
|
inviter_avatar_url
|
||||||
|
):
|
||||||
is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
|
is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
|
||||||
id_server_scheme, id_server,
|
id_server_scheme, id_server,
|
||||||
)
|
)
|
||||||
|
@ -731,7 +789,13 @@ class RoomMemberHandler(BaseHandler):
|
||||||
"medium": medium,
|
"medium": medium,
|
||||||
"address": address,
|
"address": address,
|
||||||
"room_id": room_id,
|
"room_id": room_id,
|
||||||
"sender": sender,
|
"room_alias": room_alias,
|
||||||
|
"room_avatar_url": room_avatar_url,
|
||||||
|
"room_join_rules": room_join_rules,
|
||||||
|
"room_name": room_name,
|
||||||
|
"sender": inviter_user_id,
|
||||||
|
"sender_display_name": inviter_display_name,
|
||||||
|
"sender_avatar_url": inviter_avatar_url,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
# TODO: Check for success
|
# TODO: Check for success
|
||||||
|
@ -743,13 +807,17 @@ class RoomMemberHandler(BaseHandler):
|
||||||
)
|
)
|
||||||
defer.returnValue((token, public_key, key_validity_url, display_name))
|
defer.returnValue((token, public_key, key_validity_url, display_name))
|
||||||
|
|
||||||
|
def forget(self, user, room_id):
|
||||||
|
return self.store.forget(user.to_string(), room_id)
|
||||||
|
|
||||||
|
|
||||||
class RoomListHandler(BaseHandler):
|
class RoomListHandler(BaseHandler):
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_public_room_list(self):
|
def get_public_room_list(self):
|
||||||
chunk = yield self.store.get_rooms(is_public=True)
|
chunk = yield self.store.get_rooms(is_public=True)
|
||||||
results = yield defer.gatherResults(
|
|
||||||
|
room_members = yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
self.store.get_users_in_room(room["room_id"])
|
self.store.get_users_in_room(room["room_id"])
|
||||||
for room in chunk
|
for room in chunk
|
||||||
|
@ -757,12 +825,30 @@ class RoomListHandler(BaseHandler):
|
||||||
consumeErrors=True,
|
consumeErrors=True,
|
||||||
).addErrback(unwrapFirstError)
|
).addErrback(unwrapFirstError)
|
||||||
|
|
||||||
|
avatar_urls = yield defer.gatherResults(
|
||||||
|
[
|
||||||
|
self.get_room_avatar_url(room["room_id"])
|
||||||
|
for room in chunk
|
||||||
|
],
|
||||||
|
consumeErrors=True,
|
||||||
|
).addErrback(unwrapFirstError)
|
||||||
|
|
||||||
for i, room in enumerate(chunk):
|
for i, room in enumerate(chunk):
|
||||||
room["num_joined_members"] = len(results[i])
|
room["num_joined_members"] = len(room_members[i])
|
||||||
|
if avatar_urls[i]:
|
||||||
|
room["avatar_url"] = avatar_urls[i]
|
||||||
|
|
||||||
# FIXME (erikj): START is no longer a valid value
|
# FIXME (erikj): START is no longer a valid value
|
||||||
defer.returnValue({"start": "START", "end": "END", "chunk": chunk})
|
defer.returnValue({"start": "START", "end": "END", "chunk": chunk})
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def get_room_avatar_url(self, room_id):
|
||||||
|
event = yield self.hs.get_state_handler().get_current_state(
|
||||||
|
room_id, "m.room.avatar"
|
||||||
|
)
|
||||||
|
if event and "url" in event.content:
|
||||||
|
defer.returnValue(event.content["url"])
|
||||||
|
|
||||||
|
|
||||||
class RoomContextHandler(BaseHandler):
|
class RoomContextHandler(BaseHandler):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
|
|
|
@ -131,6 +131,17 @@ class SearchHandler(BaseHandler):
|
||||||
if batch_group == "room_id":
|
if batch_group == "room_id":
|
||||||
room_ids.intersection_update({batch_group_key})
|
room_ids.intersection_update({batch_group_key})
|
||||||
|
|
||||||
|
if not room_ids:
|
||||||
|
defer.returnValue({
|
||||||
|
"search_categories": {
|
||||||
|
"room_events": {
|
||||||
|
"results": [],
|
||||||
|
"count": 0,
|
||||||
|
"highlights": [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
rank_map = {} # event_id -> rank of event
|
rank_map = {} # event_id -> rank of event
|
||||||
allowed_events = []
|
allowed_events = []
|
||||||
room_groups = {} # Holds result of grouping by room, if applicable
|
room_groups = {} # Holds result of grouping by room, if applicable
|
||||||
|
@ -139,11 +150,22 @@ class SearchHandler(BaseHandler):
|
||||||
# Holds the next_batch for the entire result set if one of those exists
|
# Holds the next_batch for the entire result set if one of those exists
|
||||||
global_next_batch = None
|
global_next_batch = None
|
||||||
|
|
||||||
|
highlights = set()
|
||||||
|
|
||||||
|
count = None
|
||||||
|
|
||||||
if order_by == "rank":
|
if order_by == "rank":
|
||||||
results = yield self.store.search_msgs(
|
search_result = yield self.store.search_msgs(
|
||||||
room_ids, search_term, keys
|
room_ids, search_term, keys
|
||||||
)
|
)
|
||||||
|
|
||||||
|
count = search_result["count"]
|
||||||
|
|
||||||
|
if search_result["highlights"]:
|
||||||
|
highlights.update(search_result["highlights"])
|
||||||
|
|
||||||
|
results = search_result["results"]
|
||||||
|
|
||||||
results_map = {r["event"].event_id: r for r in results}
|
results_map = {r["event"].event_id: r for r in results}
|
||||||
|
|
||||||
rank_map.update({r["event"].event_id: r["rank"] for r in results})
|
rank_map.update({r["event"].event_id: r["rank"] for r in results})
|
||||||
|
@ -171,80 +193,78 @@ class SearchHandler(BaseHandler):
|
||||||
s["results"].append(e.event_id)
|
s["results"].append(e.event_id)
|
||||||
|
|
||||||
elif order_by == "recent":
|
elif order_by == "recent":
|
||||||
# In this case we specifically loop through each room as the given
|
room_events = []
|
||||||
# limit applies to each room, rather than a global list.
|
i = 0
|
||||||
# This is not necessarilly a good idea.
|
|
||||||
for room_id in room_ids:
|
pagination_token = batch_token
|
||||||
room_events = []
|
|
||||||
if batch_group == "room_id" and batch_group_key == room_id:
|
# We keep looping and we keep filtering until we reach the limit
|
||||||
pagination_token = batch_token
|
# or we run out of things.
|
||||||
else:
|
# But only go around 5 times since otherwise synapse will be sad.
|
||||||
|
while len(room_events) < search_filter.limit() and i < 5:
|
||||||
|
i += 1
|
||||||
|
search_result = yield self.store.search_rooms(
|
||||||
|
room_ids, search_term, keys, search_filter.limit() * 2,
|
||||||
|
pagination_token=pagination_token,
|
||||||
|
)
|
||||||
|
|
||||||
|
if search_result["highlights"]:
|
||||||
|
highlights.update(search_result["highlights"])
|
||||||
|
|
||||||
|
count = search_result["count"]
|
||||||
|
|
||||||
|
results = search_result["results"]
|
||||||
|
|
||||||
|
results_map = {r["event"].event_id: r for r in results}
|
||||||
|
|
||||||
|
rank_map.update({r["event"].event_id: r["rank"] for r in results})
|
||||||
|
|
||||||
|
filtered_events = search_filter.filter([
|
||||||
|
r["event"] for r in results
|
||||||
|
])
|
||||||
|
|
||||||
|
events = yield self._filter_events_for_client(
|
||||||
|
user.to_string(), filtered_events
|
||||||
|
)
|
||||||
|
|
||||||
|
room_events.extend(events)
|
||||||
|
room_events = room_events[:search_filter.limit()]
|
||||||
|
|
||||||
|
if len(results) < search_filter.limit() * 2:
|
||||||
pagination_token = None
|
pagination_token = None
|
||||||
i = 0
|
break
|
||||||
|
|
||||||
# We keep looping and we keep filtering until we reach the limit
|
|
||||||
# or we run out of things.
|
|
||||||
# But only go around 5 times since otherwise synapse will be sad.
|
|
||||||
while len(room_events) < search_filter.limit() and i < 5:
|
|
||||||
i += 1
|
|
||||||
results = yield self.store.search_room(
|
|
||||||
room_id, search_term, keys, search_filter.limit() * 2,
|
|
||||||
pagination_token=pagination_token,
|
|
||||||
)
|
|
||||||
|
|
||||||
results_map = {r["event"].event_id: r for r in results}
|
|
||||||
|
|
||||||
rank_map.update({r["event"].event_id: r["rank"] for r in results})
|
|
||||||
|
|
||||||
filtered_events = search_filter.filter([
|
|
||||||
r["event"] for r in results
|
|
||||||
])
|
|
||||||
|
|
||||||
events = yield self._filter_events_for_client(
|
|
||||||
user.to_string(), filtered_events
|
|
||||||
)
|
|
||||||
|
|
||||||
room_events.extend(events)
|
|
||||||
room_events = room_events[:search_filter.limit()]
|
|
||||||
|
|
||||||
if len(results) < search_filter.limit() * 2:
|
|
||||||
pagination_token = None
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
pagination_token = results[-1]["pagination_token"]
|
|
||||||
|
|
||||||
if room_events:
|
|
||||||
res = results_map[room_events[-1].event_id]
|
|
||||||
pagination_token = res["pagination_token"]
|
|
||||||
|
|
||||||
group = room_groups.setdefault(room_id, {})
|
|
||||||
if pagination_token:
|
|
||||||
next_batch = encode_base64("%s\n%s\n%s" % (
|
|
||||||
"room_id", room_id, pagination_token
|
|
||||||
))
|
|
||||||
group["next_batch"] = next_batch
|
|
||||||
|
|
||||||
if batch_token:
|
|
||||||
global_next_batch = next_batch
|
|
||||||
|
|
||||||
group["results"] = [e.event_id for e in room_events]
|
|
||||||
group["order"] = max(
|
|
||||||
e.origin_server_ts/1000 for e in room_events
|
|
||||||
if hasattr(e, "origin_server_ts")
|
|
||||||
)
|
|
||||||
|
|
||||||
allowed_events.extend(room_events)
|
|
||||||
|
|
||||||
# Normalize the group orders
|
|
||||||
if room_groups:
|
|
||||||
if len(room_groups) > 1:
|
|
||||||
mx = max(g["order"] for g in room_groups.values())
|
|
||||||
mn = min(g["order"] for g in room_groups.values())
|
|
||||||
|
|
||||||
for g in room_groups.values():
|
|
||||||
g["order"] = (g["order"] - mn) * 1.0 / (mx - mn)
|
|
||||||
else:
|
else:
|
||||||
room_groups.values()[0]["order"] = 1
|
pagination_token = results[-1]["pagination_token"]
|
||||||
|
|
||||||
|
for event in room_events:
|
||||||
|
group = room_groups.setdefault(event.room_id, {
|
||||||
|
"results": [],
|
||||||
|
})
|
||||||
|
group["results"].append(event.event_id)
|
||||||
|
|
||||||
|
if room_events and len(room_events) >= search_filter.limit():
|
||||||
|
last_event_id = room_events[-1].event_id
|
||||||
|
pagination_token = results_map[last_event_id]["pagination_token"]
|
||||||
|
|
||||||
|
# We want to respect the given batch group and group keys so
|
||||||
|
# that if people blindly use the top level `next_batch` token
|
||||||
|
# it returns more from the same group (if applicable) rather
|
||||||
|
# than reverting to searching all results again.
|
||||||
|
if batch_group and batch_group_key:
|
||||||
|
global_next_batch = encode_base64("%s\n%s\n%s" % (
|
||||||
|
batch_group, batch_group_key, pagination_token
|
||||||
|
))
|
||||||
|
else:
|
||||||
|
global_next_batch = encode_base64("%s\n%s\n%s" % (
|
||||||
|
"all", "", pagination_token
|
||||||
|
))
|
||||||
|
|
||||||
|
for room_id, group in room_groups.items():
|
||||||
|
group["next_batch"] = encode_base64("%s\n%s\n%s" % (
|
||||||
|
"room_id", room_id, pagination_token
|
||||||
|
))
|
||||||
|
|
||||||
|
allowed_events.extend(room_events)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# We should never get here due to the guard earlier.
|
# We should never get here due to the guard earlier.
|
||||||
|
@ -334,20 +354,19 @@ class SearchHandler(BaseHandler):
|
||||||
# We're now about to serialize the events. We should not make any
|
# We're now about to serialize the events. We should not make any
|
||||||
# blocking calls after this. Otherwise the 'age' will be wrong
|
# blocking calls after this. Otherwise the 'age' will be wrong
|
||||||
|
|
||||||
results = {
|
results = [
|
||||||
e.event_id: {
|
{
|
||||||
"rank": rank_map[e.event_id],
|
"rank": rank_map[e.event_id],
|
||||||
"result": serialize_event(e, time_now),
|
"result": serialize_event(e, time_now),
|
||||||
"context": contexts.get(e.event_id, {}),
|
"context": contexts.get(e.event_id, {}),
|
||||||
}
|
}
|
||||||
for e in allowed_events
|
for e in allowed_events
|
||||||
}
|
]
|
||||||
|
|
||||||
logger.info("Found %d results", len(results))
|
|
||||||
|
|
||||||
rooms_cat_res = {
|
rooms_cat_res = {
|
||||||
"results": results,
|
"results": results,
|
||||||
"count": len(results)
|
"count": count,
|
||||||
|
"highlights": list(highlights),
|
||||||
}
|
}
|
||||||
|
|
||||||
if state_results:
|
if state_results:
|
||||||
|
|
|
@ -15,8 +15,9 @@
|
||||||
|
|
||||||
from ._base import BaseHandler
|
from ._base import BaseHandler
|
||||||
|
|
||||||
from synapse.streams.config import PaginationConfig
|
|
||||||
from synapse.api.constants import Membership, EventTypes
|
from synapse.api.constants import Membership, EventTypes
|
||||||
|
from synapse.api.errors import GuestAccessError
|
||||||
|
from synapse.util import unwrapFirstError
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
@ -28,6 +29,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
SyncConfig = collections.namedtuple("SyncConfig", [
|
SyncConfig = collections.namedtuple("SyncConfig", [
|
||||||
"user",
|
"user",
|
||||||
|
"is_guest",
|
||||||
"filter",
|
"filter",
|
||||||
])
|
])
|
||||||
|
|
||||||
|
@ -100,6 +102,7 @@ class InvitedSyncResult(collections.namedtuple("InvitedSyncResult", [
|
||||||
class SyncResult(collections.namedtuple("SyncResult", [
|
class SyncResult(collections.namedtuple("SyncResult", [
|
||||||
"next_batch", # Token for the next sync
|
"next_batch", # Token for the next sync
|
||||||
"presence", # List of presence events for the user.
|
"presence", # List of presence events for the user.
|
||||||
|
"account_data", # List of account_data events for the user.
|
||||||
"joined", # JoinedSyncResult for each joined room.
|
"joined", # JoinedSyncResult for each joined room.
|
||||||
"invited", # InvitedSyncResult for each invited room.
|
"invited", # InvitedSyncResult for each invited room.
|
||||||
"archived", # ArchivedSyncResult for each archived room.
|
"archived", # ArchivedSyncResult for each archived room.
|
||||||
|
@ -115,6 +118,8 @@ class SyncResult(collections.namedtuple("SyncResult", [
|
||||||
self.presence or self.joined or self.invited
|
self.presence or self.joined or self.invited
|
||||||
)
|
)
|
||||||
|
|
||||||
|
GuestRoom = collections.namedtuple("GuestRoom", ("room_id", "membership"))
|
||||||
|
|
||||||
|
|
||||||
class SyncHandler(BaseHandler):
|
class SyncHandler(BaseHandler):
|
||||||
|
|
||||||
|
@ -133,6 +138,18 @@ class SyncHandler(BaseHandler):
|
||||||
A Deferred SyncResult.
|
A Deferred SyncResult.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
if sync_config.is_guest:
|
||||||
|
bad_rooms = []
|
||||||
|
for room_id in sync_config.filter.list_rooms():
|
||||||
|
world_readable = yield self._is_world_readable(room_id)
|
||||||
|
if not world_readable:
|
||||||
|
bad_rooms.append(room_id)
|
||||||
|
|
||||||
|
if bad_rooms:
|
||||||
|
raise GuestAccessError(
|
||||||
|
bad_rooms, 403, "Guest access not allowed"
|
||||||
|
)
|
||||||
|
|
||||||
if timeout == 0 or since_token is None or full_state:
|
if timeout == 0 or since_token is None or full_state:
|
||||||
# we are going to return immediately, so don't bother calling
|
# we are going to return immediately, so don't bother calling
|
||||||
# notifier.wait_for_events.
|
# notifier.wait_for_events.
|
||||||
|
@ -149,6 +166,17 @@ class SyncHandler(BaseHandler):
|
||||||
)
|
)
|
||||||
defer.returnValue(result)
|
defer.returnValue(result)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _is_world_readable(self, room_id):
|
||||||
|
state = yield self.hs.get_state_handler().get_current_state(
|
||||||
|
room_id,
|
||||||
|
EventTypes.RoomHistoryVisibility
|
||||||
|
)
|
||||||
|
if state and "history_visibility" in state.content:
|
||||||
|
defer.returnValue(state.content["history_visibility"] == "world_readable")
|
||||||
|
else:
|
||||||
|
defer.returnValue(False)
|
||||||
|
|
||||||
def current_sync_for_user(self, sync_config, since_token=None,
|
def current_sync_for_user(self, sync_config, since_token=None,
|
||||||
full_state=False):
|
full_state=False):
|
||||||
"""Get the sync for client needed to match what the server has now.
|
"""Get the sync for client needed to match what the server has now.
|
||||||
|
@ -172,47 +200,71 @@ class SyncHandler(BaseHandler):
|
||||||
"""
|
"""
|
||||||
now_token = yield self.event_sources.get_current_token()
|
now_token = yield self.event_sources.get_current_token()
|
||||||
|
|
||||||
now_token, ephemeral_by_room = yield self.ephemeral_by_room(
|
if sync_config.is_guest:
|
||||||
sync_config, now_token
|
room_list = [
|
||||||
)
|
GuestRoom(room_id, Membership.JOIN)
|
||||||
|
for room_id in sync_config.filter.list_rooms()
|
||||||
|
]
|
||||||
|
|
||||||
|
account_data = {}
|
||||||
|
account_data_by_room = {}
|
||||||
|
tags_by_room = {}
|
||||||
|
|
||||||
|
else:
|
||||||
|
membership_list = (Membership.INVITE, Membership.JOIN)
|
||||||
|
if sync_config.filter.include_leave:
|
||||||
|
membership_list += (Membership.LEAVE, Membership.BAN)
|
||||||
|
|
||||||
|
room_list = yield self.store.get_rooms_for_user_where_membership_is(
|
||||||
|
user_id=sync_config.user.to_string(),
|
||||||
|
membership_list=membership_list
|
||||||
|
)
|
||||||
|
|
||||||
|
account_data, account_data_by_room = (
|
||||||
|
yield self.store.get_account_data_for_user(
|
||||||
|
sync_config.user.to_string()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
tags_by_room = yield self.store.get_tags_for_user(
|
||||||
|
sync_config.user.to_string()
|
||||||
|
)
|
||||||
|
|
||||||
presence_stream = self.event_sources.sources["presence"]
|
presence_stream = self.event_sources.sources["presence"]
|
||||||
# TODO (mjark): This looks wrong, shouldn't we be getting the presence
|
|
||||||
# UP to the present rather than after the present?
|
joined_room_ids = [
|
||||||
pagination_config = PaginationConfig(from_token=now_token)
|
room.room_id for room in room_list
|
||||||
presence, _ = yield presence_stream.get_pagination_rows(
|
if room.membership == Membership.JOIN
|
||||||
|
]
|
||||||
|
|
||||||
|
presence, _ = yield presence_stream.get_new_events(
|
||||||
|
from_key=0,
|
||||||
user=sync_config.user,
|
user=sync_config.user,
|
||||||
pagination_config=pagination_config.get_source_config("presence"),
|
room_ids=joined_room_ids,
|
||||||
key=None
|
is_guest=sync_config.is_guest,
|
||||||
)
|
|
||||||
room_list = yield self.store.get_rooms_for_user_where_membership_is(
|
|
||||||
user_id=sync_config.user.to_string(),
|
|
||||||
membership_list=(
|
|
||||||
Membership.INVITE,
|
|
||||||
Membership.JOIN,
|
|
||||||
Membership.LEAVE,
|
|
||||||
Membership.BAN
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
tags_by_room = yield self.store.get_tags_for_user(
|
now_token, ephemeral_by_room = yield self.ephemeral_by_room(
|
||||||
sync_config.user.to_string()
|
sync_config, now_token, joined_room_ids
|
||||||
)
|
)
|
||||||
|
|
||||||
joined = []
|
joined = []
|
||||||
invited = []
|
invited = []
|
||||||
archived = []
|
archived = []
|
||||||
|
deferreds = []
|
||||||
for event in room_list:
|
for event in room_list:
|
||||||
if event.membership == Membership.JOIN:
|
if event.membership == Membership.JOIN:
|
||||||
room_sync = yield self.full_state_sync_for_joined_room(
|
room_sync_deferred = self.full_state_sync_for_joined_room(
|
||||||
room_id=event.room_id,
|
room_id=event.room_id,
|
||||||
sync_config=sync_config,
|
sync_config=sync_config,
|
||||||
now_token=now_token,
|
now_token=now_token,
|
||||||
timeline_since_token=timeline_since_token,
|
timeline_since_token=timeline_since_token,
|
||||||
ephemeral_by_room=ephemeral_by_room,
|
ephemeral_by_room=ephemeral_by_room,
|
||||||
tags_by_room=tags_by_room,
|
tags_by_room=tags_by_room,
|
||||||
|
account_data_by_room=account_data_by_room,
|
||||||
)
|
)
|
||||||
joined.append(room_sync)
|
room_sync_deferred.addCallback(joined.append)
|
||||||
|
deferreds.append(room_sync_deferred)
|
||||||
elif event.membership == Membership.INVITE:
|
elif event.membership == Membership.INVITE:
|
||||||
invite = yield self.store.get_event(event.event_id)
|
invite = yield self.store.get_event(event.event_id)
|
||||||
invited.append(InvitedSyncResult(
|
invited.append(InvitedSyncResult(
|
||||||
|
@ -223,18 +275,25 @@ class SyncHandler(BaseHandler):
|
||||||
leave_token = now_token.copy_and_replace(
|
leave_token = now_token.copy_and_replace(
|
||||||
"room_key", "s%d" % (event.stream_ordering,)
|
"room_key", "s%d" % (event.stream_ordering,)
|
||||||
)
|
)
|
||||||
room_sync = yield self.full_state_sync_for_archived_room(
|
room_sync_deferred = self.full_state_sync_for_archived_room(
|
||||||
sync_config=sync_config,
|
sync_config=sync_config,
|
||||||
room_id=event.room_id,
|
room_id=event.room_id,
|
||||||
leave_event_id=event.event_id,
|
leave_event_id=event.event_id,
|
||||||
leave_token=leave_token,
|
leave_token=leave_token,
|
||||||
timeline_since_token=timeline_since_token,
|
timeline_since_token=timeline_since_token,
|
||||||
tags_by_room=tags_by_room,
|
tags_by_room=tags_by_room,
|
||||||
|
account_data_by_room=account_data_by_room,
|
||||||
)
|
)
|
||||||
archived.append(room_sync)
|
room_sync_deferred.addCallback(archived.append)
|
||||||
|
deferreds.append(room_sync_deferred)
|
||||||
|
|
||||||
|
yield defer.gatherResults(
|
||||||
|
deferreds, consumeErrors=True
|
||||||
|
).addErrback(unwrapFirstError)
|
||||||
|
|
||||||
defer.returnValue(SyncResult(
|
defer.returnValue(SyncResult(
|
||||||
presence=presence,
|
presence=presence,
|
||||||
|
account_data=self.account_data_for_user(account_data),
|
||||||
joined=joined,
|
joined=joined,
|
||||||
invited=invited,
|
invited=invited,
|
||||||
archived=archived,
|
archived=archived,
|
||||||
|
@ -244,7 +303,8 @@ class SyncHandler(BaseHandler):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def full_state_sync_for_joined_room(self, room_id, sync_config,
|
def full_state_sync_for_joined_room(self, room_id, sync_config,
|
||||||
now_token, timeline_since_token,
|
now_token, timeline_since_token,
|
||||||
ephemeral_by_room, tags_by_room):
|
ephemeral_by_room, tags_by_room,
|
||||||
|
account_data_by_room):
|
||||||
"""Sync a room for a client which is starting without any state
|
"""Sync a room for a client which is starting without any state
|
||||||
Returns:
|
Returns:
|
||||||
A Deferred JoinedSyncResult.
|
A Deferred JoinedSyncResult.
|
||||||
|
@ -262,26 +322,47 @@ class SyncHandler(BaseHandler):
|
||||||
state=current_state,
|
state=current_state,
|
||||||
ephemeral=ephemeral_by_room.get(room_id, []),
|
ephemeral=ephemeral_by_room.get(room_id, []),
|
||||||
account_data=self.account_data_for_room(
|
account_data=self.account_data_for_room(
|
||||||
room_id, tags_by_room
|
room_id, tags_by_room, account_data_by_room
|
||||||
),
|
),
|
||||||
))
|
))
|
||||||
|
|
||||||
def account_data_for_room(self, room_id, tags_by_room):
|
def account_data_for_user(self, account_data):
|
||||||
account_data = []
|
account_data_events = []
|
||||||
|
|
||||||
|
for account_data_type, content in account_data.items():
|
||||||
|
account_data_events.append({
|
||||||
|
"type": account_data_type,
|
||||||
|
"content": content,
|
||||||
|
})
|
||||||
|
|
||||||
|
return account_data_events
|
||||||
|
|
||||||
|
def account_data_for_room(self, room_id, tags_by_room, account_data_by_room):
|
||||||
|
account_data_events = []
|
||||||
tags = tags_by_room.get(room_id)
|
tags = tags_by_room.get(room_id)
|
||||||
if tags is not None:
|
if tags is not None:
|
||||||
account_data.append({
|
account_data_events.append({
|
||||||
"type": "m.tag",
|
"type": "m.tag",
|
||||||
"content": {"tags": tags},
|
"content": {"tags": tags},
|
||||||
})
|
})
|
||||||
return account_data
|
|
||||||
|
account_data = account_data_by_room.get(room_id, {})
|
||||||
|
for account_data_type, content in account_data.items():
|
||||||
|
account_data_events.append({
|
||||||
|
"type": account_data_type,
|
||||||
|
"content": content,
|
||||||
|
})
|
||||||
|
|
||||||
|
return account_data_events
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def ephemeral_by_room(self, sync_config, now_token, since_token=None):
|
def ephemeral_by_room(self, sync_config, now_token, room_ids,
|
||||||
|
since_token=None):
|
||||||
"""Get the ephemeral events for each room the user is in
|
"""Get the ephemeral events for each room the user is in
|
||||||
Args:
|
Args:
|
||||||
sync_config (SyncConfig): The flags, filters and user for the sync.
|
sync_config (SyncConfig): The flags, filters and user for the sync.
|
||||||
now_token (StreamToken): Where the server is currently up to.
|
now_token (StreamToken): Where the server is currently up to.
|
||||||
|
room_ids (list): List of room id strings to get data for.
|
||||||
since_token (StreamToken): Where the server was when the client
|
since_token (StreamToken): Where the server was when the client
|
||||||
last synced.
|
last synced.
|
||||||
Returns:
|
Returns:
|
||||||
|
@ -292,9 +373,6 @@ class SyncHandler(BaseHandler):
|
||||||
|
|
||||||
typing_key = since_token.typing_key if since_token else "0"
|
typing_key = since_token.typing_key if since_token else "0"
|
||||||
|
|
||||||
rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
|
|
||||||
room_ids = [room.room_id for room in rooms]
|
|
||||||
|
|
||||||
typing_source = self.event_sources.sources["typing"]
|
typing_source = self.event_sources.sources["typing"]
|
||||||
typing, typing_key = yield typing_source.get_new_events(
|
typing, typing_key = yield typing_source.get_new_events(
|
||||||
user=sync_config.user,
|
user=sync_config.user,
|
||||||
|
@ -341,7 +419,8 @@ class SyncHandler(BaseHandler):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def full_state_sync_for_archived_room(self, room_id, sync_config,
|
def full_state_sync_for_archived_room(self, room_id, sync_config,
|
||||||
leave_event_id, leave_token,
|
leave_event_id, leave_token,
|
||||||
timeline_since_token, tags_by_room):
|
timeline_since_token, tags_by_room,
|
||||||
|
account_data_by_room):
|
||||||
"""Sync a room for a client which is starting without any state
|
"""Sync a room for a client which is starting without any state
|
||||||
Returns:
|
Returns:
|
||||||
A Deferred JoinedSyncResult.
|
A Deferred JoinedSyncResult.
|
||||||
|
@ -358,7 +437,7 @@ class SyncHandler(BaseHandler):
|
||||||
timeline=batch,
|
timeline=batch,
|
||||||
state=leave_state,
|
state=leave_state,
|
||||||
account_data=self.account_data_for_room(
|
account_data=self.account_data_for_room(
|
||||||
room_id, tags_by_room
|
room_id, tags_by_room, account_data_by_room
|
||||||
),
|
),
|
||||||
))
|
))
|
||||||
|
|
||||||
|
@ -371,8 +450,38 @@ class SyncHandler(BaseHandler):
|
||||||
"""
|
"""
|
||||||
now_token = yield self.event_sources.get_current_token()
|
now_token = yield self.event_sources.get_current_token()
|
||||||
|
|
||||||
rooms = yield self.store.get_rooms_for_user(sync_config.user.to_string())
|
if sync_config.is_guest:
|
||||||
room_ids = [room.room_id for room in rooms]
|
room_ids = sync_config.filter.list_rooms()
|
||||||
|
|
||||||
|
tags_by_room = {}
|
||||||
|
account_data = {}
|
||||||
|
account_data_by_room = {}
|
||||||
|
|
||||||
|
else:
|
||||||
|
rooms = yield self.store.get_rooms_for_user(
|
||||||
|
sync_config.user.to_string()
|
||||||
|
)
|
||||||
|
room_ids = [room.room_id for room in rooms]
|
||||||
|
|
||||||
|
now_token, ephemeral_by_room = yield self.ephemeral_by_room(
|
||||||
|
sync_config, now_token, since_token
|
||||||
|
)
|
||||||
|
|
||||||
|
tags_by_room = yield self.store.get_updated_tags(
|
||||||
|
sync_config.user.to_string(),
|
||||||
|
since_token.account_data_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
account_data, account_data_by_room = (
|
||||||
|
yield self.store.get_updated_account_data_for_user(
|
||||||
|
sync_config.user.to_string(),
|
||||||
|
since_token.account_data_key,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
now_token, ephemeral_by_room = yield self.ephemeral_by_room(
|
||||||
|
sync_config, now_token, room_ids, since_token
|
||||||
|
)
|
||||||
|
|
||||||
presence_source = self.event_sources.sources["presence"]
|
presence_source = self.event_sources.sources["presence"]
|
||||||
presence, presence_key = yield presence_source.get_new_events(
|
presence, presence_key = yield presence_source.get_new_events(
|
||||||
|
@ -380,15 +489,10 @@ class SyncHandler(BaseHandler):
|
||||||
from_key=since_token.presence_key,
|
from_key=since_token.presence_key,
|
||||||
limit=sync_config.filter.presence_limit(),
|
limit=sync_config.filter.presence_limit(),
|
||||||
room_ids=room_ids,
|
room_ids=room_ids,
|
||||||
# /sync doesn't support guest access, they can't get to this point in code
|
is_guest=sync_config.is_guest,
|
||||||
is_guest=False,
|
|
||||||
)
|
)
|
||||||
now_token = now_token.copy_and_replace("presence_key", presence_key)
|
now_token = now_token.copy_and_replace("presence_key", presence_key)
|
||||||
|
|
||||||
now_token, ephemeral_by_room = yield self.ephemeral_by_room(
|
|
||||||
sync_config, now_token, since_token
|
|
||||||
)
|
|
||||||
|
|
||||||
rm_handler = self.hs.get_handlers().room_member_handler
|
rm_handler = self.hs.get_handlers().room_member_handler
|
||||||
app_service = yield self.store.get_app_service_by_user_id(
|
app_service = yield self.store.get_app_service_by_user_id(
|
||||||
sync_config.user.to_string()
|
sync_config.user.to_string()
|
||||||
|
@ -408,11 +512,8 @@ class SyncHandler(BaseHandler):
|
||||||
from_key=since_token.room_key,
|
from_key=since_token.room_key,
|
||||||
to_key=now_token.room_key,
|
to_key=now_token.room_key,
|
||||||
limit=timeline_limit + 1,
|
limit=timeline_limit + 1,
|
||||||
)
|
room_ids=room_ids if sync_config.is_guest else (),
|
||||||
|
is_guest=sync_config.is_guest,
|
||||||
tags_by_room = yield self.store.get_updated_tags(
|
|
||||||
sync_config.user.to_string(),
|
|
||||||
since_token.account_data_key,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
joined = []
|
joined = []
|
||||||
|
@ -469,7 +570,7 @@ class SyncHandler(BaseHandler):
|
||||||
state=state,
|
state=state,
|
||||||
ephemeral=ephemeral_by_room.get(room_id, []),
|
ephemeral=ephemeral_by_room.get(room_id, []),
|
||||||
account_data=self.account_data_for_room(
|
account_data=self.account_data_for_room(
|
||||||
room_id, tags_by_room
|
room_id, tags_by_room, account_data_by_room
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
logger.debug("Result for room %s: %r", room_id, room_sync)
|
logger.debug("Result for room %s: %r", room_id, room_sync)
|
||||||
|
@ -492,14 +593,15 @@ class SyncHandler(BaseHandler):
|
||||||
for room_id in joined_room_ids:
|
for room_id in joined_room_ids:
|
||||||
room_sync = yield self.incremental_sync_with_gap_for_room(
|
room_sync = yield self.incremental_sync_with_gap_for_room(
|
||||||
room_id, sync_config, since_token, now_token,
|
room_id, sync_config, since_token, now_token,
|
||||||
ephemeral_by_room, tags_by_room
|
ephemeral_by_room, tags_by_room, account_data_by_room
|
||||||
)
|
)
|
||||||
if room_sync:
|
if room_sync:
|
||||||
joined.append(room_sync)
|
joined.append(room_sync)
|
||||||
|
|
||||||
for leave_event in leave_events:
|
for leave_event in leave_events:
|
||||||
room_sync = yield self.incremental_sync_for_archived_room(
|
room_sync = yield self.incremental_sync_for_archived_room(
|
||||||
sync_config, leave_event, since_token, tags_by_room
|
sync_config, leave_event, since_token, tags_by_room,
|
||||||
|
account_data_by_room
|
||||||
)
|
)
|
||||||
archived.append(room_sync)
|
archived.append(room_sync)
|
||||||
|
|
||||||
|
@ -510,6 +612,7 @@ class SyncHandler(BaseHandler):
|
||||||
|
|
||||||
defer.returnValue(SyncResult(
|
defer.returnValue(SyncResult(
|
||||||
presence=presence,
|
presence=presence,
|
||||||
|
account_data=self.account_data_for_user(account_data),
|
||||||
joined=joined,
|
joined=joined,
|
||||||
invited=invited,
|
invited=invited,
|
||||||
archived=archived,
|
archived=archived,
|
||||||
|
@ -542,7 +645,10 @@ class SyncHandler(BaseHandler):
|
||||||
end_key = "s" + room_key.split('-')[-1]
|
end_key = "s" + room_key.split('-')[-1]
|
||||||
loaded_recents = sync_config.filter.filter_room_timeline(events)
|
loaded_recents = sync_config.filter.filter_room_timeline(events)
|
||||||
loaded_recents = yield self._filter_events_for_client(
|
loaded_recents = yield self._filter_events_for_client(
|
||||||
sync_config.user.to_string(), loaded_recents,
|
sync_config.user.to_string(),
|
||||||
|
loaded_recents,
|
||||||
|
is_guest=sync_config.is_guest,
|
||||||
|
require_all_visible_for_guests=False
|
||||||
)
|
)
|
||||||
loaded_recents.extend(recents)
|
loaded_recents.extend(recents)
|
||||||
recents = loaded_recents
|
recents = loaded_recents
|
||||||
|
@ -566,7 +672,8 @@ class SyncHandler(BaseHandler):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def incremental_sync_with_gap_for_room(self, room_id, sync_config,
|
def incremental_sync_with_gap_for_room(self, room_id, sync_config,
|
||||||
since_token, now_token,
|
since_token, now_token,
|
||||||
ephemeral_by_room, tags_by_room):
|
ephemeral_by_room, tags_by_room,
|
||||||
|
account_data_by_room):
|
||||||
""" Get the incremental delta needed to bring the client up to date for
|
""" Get the incremental delta needed to bring the client up to date for
|
||||||
the room. Gives the client the most recent events and the changes to
|
the room. Gives the client the most recent events and the changes to
|
||||||
state.
|
state.
|
||||||
|
@ -606,7 +713,7 @@ class SyncHandler(BaseHandler):
|
||||||
state=state,
|
state=state,
|
||||||
ephemeral=ephemeral_by_room.get(room_id, []),
|
ephemeral=ephemeral_by_room.get(room_id, []),
|
||||||
account_data=self.account_data_for_room(
|
account_data=self.account_data_for_room(
|
||||||
room_id, tags_by_room
|
room_id, tags_by_room, account_data_by_room
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -616,7 +723,8 @@ class SyncHandler(BaseHandler):
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def incremental_sync_for_archived_room(self, sync_config, leave_event,
|
def incremental_sync_for_archived_room(self, sync_config, leave_event,
|
||||||
since_token, tags_by_room):
|
since_token, tags_by_room,
|
||||||
|
account_data_by_room):
|
||||||
""" Get the incremental delta needed to bring the client up to date for
|
""" Get the incremental delta needed to bring the client up to date for
|
||||||
the archived room.
|
the archived room.
|
||||||
Returns:
|
Returns:
|
||||||
|
@ -654,7 +762,7 @@ class SyncHandler(BaseHandler):
|
||||||
timeline=batch,
|
timeline=batch,
|
||||||
state=state_events_delta,
|
state=state_events_delta,
|
||||||
account_data=self.account_data_for_room(
|
account_data=self.account_data_for_room(
|
||||||
leave_event.room_id, tags_by_room
|
leave_event.room_id, tags_by_room, account_data_by_room
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -15,7 +15,7 @@
|
||||||
|
|
||||||
|
|
||||||
from synapse.api.errors import (
|
from synapse.api.errors import (
|
||||||
cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError
|
cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
|
||||||
)
|
)
|
||||||
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
|
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
|
||||||
import synapse.metrics
|
import synapse.metrics
|
||||||
|
@ -53,6 +53,23 @@ response_timer = metrics.register_distribution(
|
||||||
labels=["method", "servlet"]
|
labels=["method", "servlet"]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
response_ru_utime = metrics.register_distribution(
|
||||||
|
"response_ru_utime", labels=["method", "servlet"]
|
||||||
|
)
|
||||||
|
|
||||||
|
response_ru_stime = metrics.register_distribution(
|
||||||
|
"response_ru_stime", labels=["method", "servlet"]
|
||||||
|
)
|
||||||
|
|
||||||
|
response_db_txn_count = metrics.register_distribution(
|
||||||
|
"response_db_txn_count", labels=["method", "servlet"]
|
||||||
|
)
|
||||||
|
|
||||||
|
response_db_txn_duration = metrics.register_distribution(
|
||||||
|
"response_db_txn_duration", labels=["method", "servlet"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
_next_request_id = 0
|
_next_request_id = 0
|
||||||
|
|
||||||
|
|
||||||
|
@ -110,7 +127,10 @@ def request_handler(request_handler):
|
||||||
respond_with_json(
|
respond_with_json(
|
||||||
request,
|
request,
|
||||||
500,
|
500,
|
||||||
{"error": "Internal server error"},
|
{
|
||||||
|
"error": "Internal server error",
|
||||||
|
"errcode": Codes.UNKNOWN,
|
||||||
|
},
|
||||||
send_cors=True
|
send_cors=True
|
||||||
)
|
)
|
||||||
return wrapped_request_handler
|
return wrapped_request_handler
|
||||||
|
@ -120,7 +140,7 @@ class HttpServer(object):
|
||||||
""" Interface for registering callbacks on a HTTP server
|
""" Interface for registering callbacks on a HTTP server
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def register_path(self, method, path_pattern, callback):
|
def register_paths(self, method, path_patterns, callback):
|
||||||
""" Register a callback that gets fired if we receive a http request
|
""" Register a callback that gets fired if we receive a http request
|
||||||
with the given method for a path that matches the given regex.
|
with the given method for a path that matches the given regex.
|
||||||
|
|
||||||
|
@ -129,7 +149,7 @@ class HttpServer(object):
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
method (str): The method to listen to.
|
method (str): The method to listen to.
|
||||||
path_pattern (str): The regex used to match requests.
|
path_patterns (list<SRE_Pattern>): The regex used to match requests.
|
||||||
callback (function): The function to fire if we receive a matched
|
callback (function): The function to fire if we receive a matched
|
||||||
request. The first argument will be the request object and
|
request. The first argument will be the request object and
|
||||||
subsequent arguments will be any matched groups from the regex.
|
subsequent arguments will be any matched groups from the regex.
|
||||||
|
@ -165,10 +185,11 @@ class JsonResource(HttpServer, resource.Resource):
|
||||||
self.version_string = hs.version_string
|
self.version_string = hs.version_string
|
||||||
self.hs = hs
|
self.hs = hs
|
||||||
|
|
||||||
def register_path(self, method, path_pattern, callback):
|
def register_paths(self, method, path_patterns, callback):
|
||||||
self.path_regexs.setdefault(method, []).append(
|
for path_pattern in path_patterns:
|
||||||
self._PathEntry(path_pattern, callback)
|
self.path_regexs.setdefault(method, []).append(
|
||||||
)
|
self._PathEntry(path_pattern, callback)
|
||||||
|
)
|
||||||
|
|
||||||
def render(self, request):
|
def render(self, request):
|
||||||
""" This gets called by twisted every time someone sends us a request.
|
""" This gets called by twisted every time someone sends us a request.
|
||||||
|
@ -220,6 +241,21 @@ class JsonResource(HttpServer, resource.Resource):
|
||||||
self.clock.time_msec() - start, request.method, servlet_classname
|
self.clock.time_msec() - start, request.method, servlet_classname
|
||||||
)
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
context = LoggingContext.current_context()
|
||||||
|
ru_utime, ru_stime = context.get_resource_usage()
|
||||||
|
|
||||||
|
response_ru_utime.inc_by(ru_utime, request.method, servlet_classname)
|
||||||
|
response_ru_stime.inc_by(ru_stime, request.method, servlet_classname)
|
||||||
|
response_db_txn_count.inc_by(
|
||||||
|
context.db_txn_count, request.method, servlet_classname
|
||||||
|
)
|
||||||
|
response_db_txn_duration.inc_by(
|
||||||
|
context.db_txn_duration, request.method, servlet_classname
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
# Huh. No one wanted to handle that? Fiiiiiine. Send 400.
|
# Huh. No one wanted to handle that? Fiiiiiine. Send 400.
|
||||||
|
|
|
@ -19,7 +19,6 @@ from synapse.api.errors import SynapseError
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@ -102,12 +101,13 @@ class RestServlet(object):
|
||||||
|
|
||||||
def register(self, http_server):
|
def register(self, http_server):
|
||||||
""" Register this servlet with the given HTTP server. """
|
""" Register this servlet with the given HTTP server. """
|
||||||
if hasattr(self, "PATTERN"):
|
if hasattr(self, "PATTERNS"):
|
||||||
pattern = self.PATTERN
|
patterns = self.PATTERNS
|
||||||
|
|
||||||
for method in ("GET", "PUT", "POST", "OPTIONS", "DELETE"):
|
for method in ("GET", "PUT", "POST", "OPTIONS", "DELETE"):
|
||||||
if hasattr(self, "on_%s" % (method,)):
|
if hasattr(self, "on_%s" % (method,)):
|
||||||
method_handler = getattr(self, "on_%s" % (method,))
|
method_handler = getattr(self, "on_%s" % (method,))
|
||||||
http_server.register_path(method, pattern, method_handler)
|
http_server.register_paths(method, patterns, method_handler)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError("RestServlet must register something.")
|
raise NotImplementedError("RestServlet must register something.")
|
||||||
|
|
|
@ -349,7 +349,7 @@ class Notifier(object):
|
||||||
room_ids = []
|
room_ids = []
|
||||||
if is_guest:
|
if is_guest:
|
||||||
if guest_room_id:
|
if guest_room_id:
|
||||||
if not self._is_world_readable(guest_room_id):
|
if not (yield self._is_world_readable(guest_room_id)):
|
||||||
raise AuthError(403, "Guest access not allowed")
|
raise AuthError(403, "Guest access not allowed")
|
||||||
room_ids = [guest_room_id]
|
room_ids = [guest_room_id]
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -16,14 +16,12 @@
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from synapse.types import StreamToken, UserID
|
from synapse.types import StreamToken
|
||||||
|
|
||||||
import synapse.util.async
|
import synapse.util.async
|
||||||
import baserules
|
import push_rule_evaluator as push_rule_evaluator
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import simplejson as json
|
|
||||||
import re
|
|
||||||
import random
|
import random
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -33,9 +31,6 @@ class Pusher(object):
|
||||||
INITIAL_BACKOFF = 1000
|
INITIAL_BACKOFF = 1000
|
||||||
MAX_BACKOFF = 60 * 60 * 1000
|
MAX_BACKOFF = 60 * 60 * 1000
|
||||||
GIVE_UP_AFTER = 24 * 60 * 60 * 1000
|
GIVE_UP_AFTER = 24 * 60 * 60 * 1000
|
||||||
DEFAULT_ACTIONS = ['dont_notify']
|
|
||||||
|
|
||||||
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
|
|
||||||
|
|
||||||
def __init__(self, _hs, profile_tag, user_name, app_id,
|
def __init__(self, _hs, profile_tag, user_name, app_id,
|
||||||
app_display_name, device_display_name, pushkey, pushkey_ts,
|
app_display_name, device_display_name, pushkey, pushkey_ts,
|
||||||
|
@ -62,161 +57,6 @@ class Pusher(object):
|
||||||
self.last_last_active_time = 0
|
self.last_last_active_time = 0
|
||||||
self.has_unread = True
|
self.has_unread = True
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _actions_for_event(self, ev):
|
|
||||||
"""
|
|
||||||
This should take into account notification settings that the user
|
|
||||||
has configured both globally and per-room when we have the ability
|
|
||||||
to do such things.
|
|
||||||
"""
|
|
||||||
if ev['user_id'] == self.user_name:
|
|
||||||
# let's assume you probably know about messages you sent yourself
|
|
||||||
defer.returnValue(['dont_notify'])
|
|
||||||
|
|
||||||
rawrules = yield self.store.get_push_rules_for_user(self.user_name)
|
|
||||||
|
|
||||||
rules = []
|
|
||||||
for rawrule in rawrules:
|
|
||||||
rule = dict(rawrule)
|
|
||||||
rule['conditions'] = json.loads(rawrule['conditions'])
|
|
||||||
rule['actions'] = json.loads(rawrule['actions'])
|
|
||||||
rules.append(rule)
|
|
||||||
|
|
||||||
enabled_map = yield self.store.get_push_rules_enabled_for_user(self.user_name)
|
|
||||||
|
|
||||||
user = UserID.from_string(self.user_name)
|
|
||||||
|
|
||||||
rules = baserules.list_with_base_rules(rules, user)
|
|
||||||
|
|
||||||
room_id = ev['room_id']
|
|
||||||
|
|
||||||
# get *our* member event for display name matching
|
|
||||||
my_display_name = None
|
|
||||||
our_member_event = yield self.store.get_current_state(
|
|
||||||
room_id=room_id,
|
|
||||||
event_type='m.room.member',
|
|
||||||
state_key=self.user_name,
|
|
||||||
)
|
|
||||||
if our_member_event:
|
|
||||||
my_display_name = our_member_event[0].content.get("displayname")
|
|
||||||
|
|
||||||
room_members = yield self.store.get_users_in_room(room_id)
|
|
||||||
room_member_count = len(room_members)
|
|
||||||
|
|
||||||
for r in rules:
|
|
||||||
if r['rule_id'] in enabled_map:
|
|
||||||
r['enabled'] = enabled_map[r['rule_id']]
|
|
||||||
elif 'enabled' not in r:
|
|
||||||
r['enabled'] = True
|
|
||||||
if not r['enabled']:
|
|
||||||
continue
|
|
||||||
matches = True
|
|
||||||
|
|
||||||
conditions = r['conditions']
|
|
||||||
actions = r['actions']
|
|
||||||
|
|
||||||
for c in conditions:
|
|
||||||
matches &= self._event_fulfills_condition(
|
|
||||||
ev, c, display_name=my_display_name,
|
|
||||||
room_member_count=room_member_count
|
|
||||||
)
|
|
||||||
logger.debug(
|
|
||||||
"Rule %s %s",
|
|
||||||
r['rule_id'], "matches" if matches else "doesn't match"
|
|
||||||
)
|
|
||||||
# ignore rules with no actions (we have an explict 'dont_notify')
|
|
||||||
if len(actions) == 0:
|
|
||||||
logger.warn(
|
|
||||||
"Ignoring rule id %s with no actions for user %s",
|
|
||||||
r['rule_id'], self.user_name
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
if matches:
|
|
||||||
logger.info(
|
|
||||||
"%s matches for user %s, event %s",
|
|
||||||
r['rule_id'], self.user_name, ev['event_id']
|
|
||||||
)
|
|
||||||
defer.returnValue(actions)
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
"No rules match for user %s, event %s",
|
|
||||||
self.user_name, ev['event_id']
|
|
||||||
)
|
|
||||||
defer.returnValue(Pusher.DEFAULT_ACTIONS)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _glob_to_regexp(glob):
|
|
||||||
r = re.escape(glob)
|
|
||||||
r = re.sub(r'\\\*', r'.*?', r)
|
|
||||||
r = re.sub(r'\\\?', r'.', r)
|
|
||||||
|
|
||||||
# handle [abc], [a-z] and [!a-z] style ranges.
|
|
||||||
r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
|
|
||||||
lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
|
|
||||||
re.sub(r'\\\-', '-', x.group(2)))), r)
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
|
|
||||||
if condition['kind'] == 'event_match':
|
|
||||||
if 'pattern' not in condition:
|
|
||||||
logger.warn("event_match condition with no pattern")
|
|
||||||
return False
|
|
||||||
# XXX: optimisation: cache our pattern regexps
|
|
||||||
if condition['key'] == 'content.body':
|
|
||||||
r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
|
|
||||||
else:
|
|
||||||
r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
|
|
||||||
val = _value_for_dotted_key(condition['key'], ev)
|
|
||||||
if val is None:
|
|
||||||
return False
|
|
||||||
return re.search(r, val, flags=re.IGNORECASE) is not None
|
|
||||||
|
|
||||||
elif condition['kind'] == 'device':
|
|
||||||
if 'profile_tag' not in condition:
|
|
||||||
return True
|
|
||||||
return condition['profile_tag'] == self.profile_tag
|
|
||||||
|
|
||||||
elif condition['kind'] == 'contains_display_name':
|
|
||||||
# This is special because display names can be different
|
|
||||||
# between rooms and so you can't really hard code it in a rule.
|
|
||||||
# Optimisation: we should cache these names and update them from
|
|
||||||
# the event stream.
|
|
||||||
if 'content' not in ev or 'body' not in ev['content']:
|
|
||||||
return False
|
|
||||||
if not display_name:
|
|
||||||
return False
|
|
||||||
return re.search(
|
|
||||||
r"\b%s\b" % re.escape(display_name), ev['content']['body'],
|
|
||||||
flags=re.IGNORECASE
|
|
||||||
) is not None
|
|
||||||
|
|
||||||
elif condition['kind'] == 'room_member_count':
|
|
||||||
if 'is' not in condition:
|
|
||||||
return False
|
|
||||||
m = Pusher.INEQUALITY_EXPR.match(condition['is'])
|
|
||||||
if not m:
|
|
||||||
return False
|
|
||||||
ineq = m.group(1)
|
|
||||||
rhs = m.group(2)
|
|
||||||
if not rhs.isdigit():
|
|
||||||
return False
|
|
||||||
rhs = int(rhs)
|
|
||||||
|
|
||||||
if ineq == '' or ineq == '==':
|
|
||||||
return room_member_count == rhs
|
|
||||||
elif ineq == '<':
|
|
||||||
return room_member_count < rhs
|
|
||||||
elif ineq == '>':
|
|
||||||
return room_member_count > rhs
|
|
||||||
elif ineq == '>=':
|
|
||||||
return room_member_count >= rhs
|
|
||||||
elif ineq == '<=':
|
|
||||||
return room_member_count <= rhs
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_context_for_event(self, ev):
|
def get_context_for_event(self, ev):
|
||||||
name_aliases = yield self.store.get_room_name_and_aliases(
|
name_aliases = yield self.store.get_room_name_and_aliases(
|
||||||
|
@ -308,8 +148,14 @@ class Pusher(object):
|
||||||
return
|
return
|
||||||
|
|
||||||
processed = False
|
processed = False
|
||||||
actions = yield self._actions_for_event(single_event)
|
|
||||||
tweaks = _tweaks_for_actions(actions)
|
rule_evaluator = yield \
|
||||||
|
push_rule_evaluator.evaluator_for_user_name_and_profile_tag(
|
||||||
|
self.user_name, self.profile_tag, single_event['room_id'], self.store
|
||||||
|
)
|
||||||
|
|
||||||
|
actions = yield rule_evaluator.actions_for_event(single_event)
|
||||||
|
tweaks = rule_evaluator.tweaks_for_actions(actions)
|
||||||
|
|
||||||
if len(actions) == 0:
|
if len(actions) == 0:
|
||||||
logger.warn("Empty actions! Using default action.")
|
logger.warn("Empty actions! Using default action.")
|
||||||
|
@ -448,27 +294,6 @@ class Pusher(object):
|
||||||
self.has_unread = False
|
self.has_unread = False
|
||||||
|
|
||||||
|
|
||||||
def _value_for_dotted_key(dotted_key, event):
|
|
||||||
parts = dotted_key.split(".")
|
|
||||||
val = event
|
|
||||||
while len(parts) > 0:
|
|
||||||
if parts[0] not in val:
|
|
||||||
return None
|
|
||||||
val = val[parts[0]]
|
|
||||||
parts = parts[1:]
|
|
||||||
return val
|
|
||||||
|
|
||||||
|
|
||||||
def _tweaks_for_actions(actions):
|
|
||||||
tweaks = {}
|
|
||||||
for a in actions:
|
|
||||||
if not isinstance(a, dict):
|
|
||||||
continue
|
|
||||||
if 'set_tweak' in a and 'value' in a:
|
|
||||||
tweaks[a['set_tweak']] = a['value']
|
|
||||||
return tweaks
|
|
||||||
|
|
||||||
|
|
||||||
class PusherConfigException(Exception):
|
class PusherConfigException(Exception):
|
||||||
def __init__(self, msg):
|
def __init__(self, msg):
|
||||||
super(PusherConfigException, self).__init__(msg)
|
super(PusherConfigException, self).__init__(msg)
|
||||||
|
|
|
@ -247,6 +247,7 @@ def make_base_append_underride_rules(user):
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
'rule_id': 'global/underride/.m.rule.message',
|
'rule_id': 'global/underride/.m.rule.message',
|
||||||
|
'enabled': False,
|
||||||
'conditions': [
|
'conditions': [
|
||||||
{
|
{
|
||||||
'kind': 'event_match',
|
'kind': 'event_match',
|
||||||
|
|
|
@ -14,7 +14,6 @@
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from synapse.push import Pusher, PusherConfigException
|
from synapse.push import Pusher, PusherConfigException
|
||||||
from synapse.http.client import SimpleHttpClient
|
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
@ -46,7 +45,7 @@ class HttpPusher(Pusher):
|
||||||
"'url' required in data for HTTP pusher"
|
"'url' required in data for HTTP pusher"
|
||||||
)
|
)
|
||||||
self.url = data['url']
|
self.url = data['url']
|
||||||
self.httpCli = SimpleHttpClient(self.hs)
|
self.http_client = _hs.get_simple_http_client()
|
||||||
self.data_minus_url = {}
|
self.data_minus_url = {}
|
||||||
self.data_minus_url.update(self.data)
|
self.data_minus_url.update(self.data)
|
||||||
del self.data_minus_url['url']
|
del self.data_minus_url['url']
|
||||||
|
@ -107,7 +106,7 @@ class HttpPusher(Pusher):
|
||||||
if not notification_dict:
|
if not notification_dict:
|
||||||
defer.returnValue([])
|
defer.returnValue([])
|
||||||
try:
|
try:
|
||||||
resp = yield self.httpCli.post_json_get_json(self.url, notification_dict)
|
resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
|
||||||
except:
|
except:
|
||||||
logger.warn("Failed to push %s ", self.url)
|
logger.warn("Failed to push %s ", self.url)
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
@ -138,7 +137,7 @@ class HttpPusher(Pusher):
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
try:
|
try:
|
||||||
resp = yield self.httpCli.post_json_get_json(self.url, d)
|
resp = yield self.http_client.post_json_get_json(self.url, d)
|
||||||
except:
|
except:
|
||||||
logger.exception("Failed to push %s ", self.url)
|
logger.exception("Failed to push %s ", self.url)
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
|
224
synapse/push/push_rule_evaluator.py
Normal file
224
synapse/push/push_rule_evaluator.py
Normal file
|
@ -0,0 +1,224 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
from synapse.types import UserID
|
||||||
|
|
||||||
|
import baserules
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import simplejson as json
|
||||||
|
import re
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def evaluator_for_user_name_and_profile_tag(user_name, profile_tag, room_id, store):
|
||||||
|
rawrules = yield store.get_push_rules_for_user(user_name)
|
||||||
|
enabled_map = yield store.get_push_rules_enabled_for_user(user_name)
|
||||||
|
our_member_event = yield store.get_current_state(
|
||||||
|
room_id=room_id,
|
||||||
|
event_type='m.room.member',
|
||||||
|
state_key=user_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
defer.returnValue(PushRuleEvaluator(
|
||||||
|
user_name, profile_tag, rawrules, enabled_map,
|
||||||
|
room_id, our_member_event, store
|
||||||
|
))
|
||||||
|
|
||||||
|
|
||||||
|
class PushRuleEvaluator:
|
||||||
|
DEFAULT_ACTIONS = ['dont_notify']
|
||||||
|
INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$")
|
||||||
|
|
||||||
|
def __init__(self, user_name, profile_tag, raw_rules, enabled_map, room_id,
|
||||||
|
our_member_event, store):
|
||||||
|
self.user_name = user_name
|
||||||
|
self.profile_tag = profile_tag
|
||||||
|
self.room_id = room_id
|
||||||
|
self.our_member_event = our_member_event
|
||||||
|
self.store = store
|
||||||
|
|
||||||
|
rules = []
|
||||||
|
for raw_rule in raw_rules:
|
||||||
|
rule = dict(raw_rule)
|
||||||
|
rule['conditions'] = json.loads(raw_rule['conditions'])
|
||||||
|
rule['actions'] = json.loads(raw_rule['actions'])
|
||||||
|
rules.append(rule)
|
||||||
|
|
||||||
|
user = UserID.from_string(self.user_name)
|
||||||
|
self.rules = baserules.list_with_base_rules(rules, user)
|
||||||
|
|
||||||
|
self.enabled_map = enabled_map
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def tweaks_for_actions(actions):
|
||||||
|
tweaks = {}
|
||||||
|
for a in actions:
|
||||||
|
if not isinstance(a, dict):
|
||||||
|
continue
|
||||||
|
if 'set_tweak' in a and 'value' in a:
|
||||||
|
tweaks[a['set_tweak']] = a['value']
|
||||||
|
return tweaks
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def actions_for_event(self, ev):
|
||||||
|
"""
|
||||||
|
This should take into account notification settings that the user
|
||||||
|
has configured both globally and per-room when we have the ability
|
||||||
|
to do such things.
|
||||||
|
"""
|
||||||
|
if ev['user_id'] == self.user_name:
|
||||||
|
# let's assume you probably know about messages you sent yourself
|
||||||
|
defer.returnValue(['dont_notify'])
|
||||||
|
|
||||||
|
room_id = ev['room_id']
|
||||||
|
|
||||||
|
# get *our* member event for display name matching
|
||||||
|
my_display_name = None
|
||||||
|
|
||||||
|
if self.our_member_event:
|
||||||
|
my_display_name = self.our_member_event[0].content.get("displayname")
|
||||||
|
|
||||||
|
room_members = yield self.store.get_users_in_room(room_id)
|
||||||
|
room_member_count = len(room_members)
|
||||||
|
|
||||||
|
for r in self.rules:
|
||||||
|
if r['rule_id'] in self.enabled_map:
|
||||||
|
r['enabled'] = self.enabled_map[r['rule_id']]
|
||||||
|
elif 'enabled' not in r:
|
||||||
|
r['enabled'] = True
|
||||||
|
if not r['enabled']:
|
||||||
|
continue
|
||||||
|
matches = True
|
||||||
|
|
||||||
|
conditions = r['conditions']
|
||||||
|
actions = r['actions']
|
||||||
|
|
||||||
|
for c in conditions:
|
||||||
|
matches &= self._event_fulfills_condition(
|
||||||
|
ev, c, display_name=my_display_name,
|
||||||
|
room_member_count=room_member_count
|
||||||
|
)
|
||||||
|
logger.debug(
|
||||||
|
"Rule %s %s",
|
||||||
|
r['rule_id'], "matches" if matches else "doesn't match"
|
||||||
|
)
|
||||||
|
# ignore rules with no actions (we have an explict 'dont_notify')
|
||||||
|
if len(actions) == 0:
|
||||||
|
logger.warn(
|
||||||
|
"Ignoring rule id %s with no actions for user %s",
|
||||||
|
r['rule_id'], self.user_name
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
if matches:
|
||||||
|
logger.info(
|
||||||
|
"%s matches for user %s, event %s",
|
||||||
|
r['rule_id'], self.user_name, ev['event_id']
|
||||||
|
)
|
||||||
|
defer.returnValue(actions)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"No rules match for user %s, event %s",
|
||||||
|
self.user_name, ev['event_id']
|
||||||
|
)
|
||||||
|
defer.returnValue(PushRuleEvaluator.DEFAULT_ACTIONS)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _glob_to_regexp(glob):
|
||||||
|
r = re.escape(glob)
|
||||||
|
r = re.sub(r'\\\*', r'.*?', r)
|
||||||
|
r = re.sub(r'\\\?', r'.', r)
|
||||||
|
|
||||||
|
# handle [abc], [a-z] and [!a-z] style ranges.
|
||||||
|
r = re.sub(r'\\\[(\\\!|)(.*)\\\]',
|
||||||
|
lambda x: ('[%s%s]' % (x.group(1) and '^' or '',
|
||||||
|
re.sub(r'\\\-', '-', x.group(2)))), r)
|
||||||
|
return r
|
||||||
|
|
||||||
|
def _event_fulfills_condition(self, ev, condition, display_name, room_member_count):
|
||||||
|
if condition['kind'] == 'event_match':
|
||||||
|
if 'pattern' not in condition:
|
||||||
|
logger.warn("event_match condition with no pattern")
|
||||||
|
return False
|
||||||
|
# XXX: optimisation: cache our pattern regexps
|
||||||
|
if condition['key'] == 'content.body':
|
||||||
|
r = r'\b%s\b' % self._glob_to_regexp(condition['pattern'])
|
||||||
|
else:
|
||||||
|
r = r'^%s$' % self._glob_to_regexp(condition['pattern'])
|
||||||
|
val = _value_for_dotted_key(condition['key'], ev)
|
||||||
|
if val is None:
|
||||||
|
return False
|
||||||
|
return re.search(r, val, flags=re.IGNORECASE) is not None
|
||||||
|
|
||||||
|
elif condition['kind'] == 'device':
|
||||||
|
if 'profile_tag' not in condition:
|
||||||
|
return True
|
||||||
|
return condition['profile_tag'] == self.profile_tag
|
||||||
|
|
||||||
|
elif condition['kind'] == 'contains_display_name':
|
||||||
|
# This is special because display names can be different
|
||||||
|
# between rooms and so you can't really hard code it in a rule.
|
||||||
|
# Optimisation: we should cache these names and update them from
|
||||||
|
# the event stream.
|
||||||
|
if 'content' not in ev or 'body' not in ev['content']:
|
||||||
|
return False
|
||||||
|
if not display_name:
|
||||||
|
return False
|
||||||
|
return re.search(
|
||||||
|
r"\b%s\b" % re.escape(display_name), ev['content']['body'],
|
||||||
|
flags=re.IGNORECASE
|
||||||
|
) is not None
|
||||||
|
|
||||||
|
elif condition['kind'] == 'room_member_count':
|
||||||
|
if 'is' not in condition:
|
||||||
|
return False
|
||||||
|
m = PushRuleEvaluator.INEQUALITY_EXPR.match(condition['is'])
|
||||||
|
if not m:
|
||||||
|
return False
|
||||||
|
ineq = m.group(1)
|
||||||
|
rhs = m.group(2)
|
||||||
|
if not rhs.isdigit():
|
||||||
|
return False
|
||||||
|
rhs = int(rhs)
|
||||||
|
|
||||||
|
if ineq == '' or ineq == '==':
|
||||||
|
return room_member_count == rhs
|
||||||
|
elif ineq == '<':
|
||||||
|
return room_member_count < rhs
|
||||||
|
elif ineq == '>':
|
||||||
|
return room_member_count > rhs
|
||||||
|
elif ineq == '>=':
|
||||||
|
return room_member_count >= rhs
|
||||||
|
elif ineq == '<=':
|
||||||
|
return room_member_count <= rhs
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _value_for_dotted_key(dotted_key, event):
|
||||||
|
parts = dotted_key.split(".")
|
||||||
|
val = event
|
||||||
|
while len(parts) > 0:
|
||||||
|
if parts[0] not in val:
|
||||||
|
return None
|
||||||
|
val = val[parts[0]]
|
||||||
|
parts = parts[1:]
|
||||||
|
return val
|
|
@ -1,5 +1,5 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
@ -12,3 +12,69 @@
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from synapse.rest.client.v1 import (
|
||||||
|
room,
|
||||||
|
events,
|
||||||
|
profile,
|
||||||
|
presence,
|
||||||
|
initial_sync,
|
||||||
|
directory,
|
||||||
|
voip,
|
||||||
|
admin,
|
||||||
|
pusher,
|
||||||
|
push_rule,
|
||||||
|
register as v1_register,
|
||||||
|
login as v1_login,
|
||||||
|
)
|
||||||
|
|
||||||
|
from synapse.rest.client.v2_alpha import (
|
||||||
|
sync,
|
||||||
|
filter,
|
||||||
|
account,
|
||||||
|
register,
|
||||||
|
auth,
|
||||||
|
receipts,
|
||||||
|
keys,
|
||||||
|
tokenrefresh,
|
||||||
|
tags,
|
||||||
|
account_data,
|
||||||
|
)
|
||||||
|
|
||||||
|
from synapse.http.server import JsonResource
|
||||||
|
|
||||||
|
|
||||||
|
class ClientRestResource(JsonResource):
|
||||||
|
"""A resource for version 1 of the matrix client API."""
|
||||||
|
|
||||||
|
def __init__(self, hs):
|
||||||
|
JsonResource.__init__(self, hs, canonical_json=False)
|
||||||
|
self.register_servlets(self, hs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def register_servlets(client_resource, hs):
|
||||||
|
# "v1"
|
||||||
|
room.register_servlets(hs, client_resource)
|
||||||
|
events.register_servlets(hs, client_resource)
|
||||||
|
v1_register.register_servlets(hs, client_resource)
|
||||||
|
v1_login.register_servlets(hs, client_resource)
|
||||||
|
profile.register_servlets(hs, client_resource)
|
||||||
|
presence.register_servlets(hs, client_resource)
|
||||||
|
initial_sync.register_servlets(hs, client_resource)
|
||||||
|
directory.register_servlets(hs, client_resource)
|
||||||
|
voip.register_servlets(hs, client_resource)
|
||||||
|
admin.register_servlets(hs, client_resource)
|
||||||
|
pusher.register_servlets(hs, client_resource)
|
||||||
|
push_rule.register_servlets(hs, client_resource)
|
||||||
|
|
||||||
|
# "v2"
|
||||||
|
sync.register_servlets(hs, client_resource)
|
||||||
|
filter.register_servlets(hs, client_resource)
|
||||||
|
account.register_servlets(hs, client_resource)
|
||||||
|
register.register_servlets(hs, client_resource)
|
||||||
|
auth.register_servlets(hs, client_resource)
|
||||||
|
receipts.register_servlets(hs, client_resource)
|
||||||
|
keys.register_servlets(hs, client_resource)
|
||||||
|
tokenrefresh.register_servlets(hs, client_resource)
|
||||||
|
tags.register_servlets(hs, client_resource)
|
||||||
|
account_data.register_servlets(hs, client_resource)
|
||||||
|
|
|
@ -12,33 +12,3 @@
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from . import (
|
|
||||||
room, events, register, login, profile, presence, initial_sync, directory,
|
|
||||||
voip, admin, pusher, push_rule
|
|
||||||
)
|
|
||||||
|
|
||||||
from synapse.http.server import JsonResource
|
|
||||||
|
|
||||||
|
|
||||||
class ClientV1RestResource(JsonResource):
|
|
||||||
"""A resource for version 1 of the matrix client API."""
|
|
||||||
|
|
||||||
def __init__(self, hs):
|
|
||||||
JsonResource.__init__(self, hs, canonical_json=False)
|
|
||||||
self.register_servlets(self, hs)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def register_servlets(client_resource, hs):
|
|
||||||
room.register_servlets(hs, client_resource)
|
|
||||||
events.register_servlets(hs, client_resource)
|
|
||||||
register.register_servlets(hs, client_resource)
|
|
||||||
login.register_servlets(hs, client_resource)
|
|
||||||
profile.register_servlets(hs, client_resource)
|
|
||||||
presence.register_servlets(hs, client_resource)
|
|
||||||
initial_sync.register_servlets(hs, client_resource)
|
|
||||||
directory.register_servlets(hs, client_resource)
|
|
||||||
voip.register_servlets(hs, client_resource)
|
|
||||||
admin.register_servlets(hs, client_resource)
|
|
||||||
pusher.register_servlets(hs, client_resource)
|
|
||||||
push_rule.register_servlets(hs, client_resource)
|
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
from synapse.api.errors import AuthError, SynapseError
|
from synapse.api.errors import AuthError, SynapseError
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
|
|
||||||
from base import ClientV1RestServlet, client_path_pattern
|
from base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class WhoisRestServlet(ClientV1RestServlet):
|
class WhoisRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/admin/whois/(?P<user_id>[^/]*)")
|
PATTERNS = client_path_patterns("/admin/whois/(?P<user_id>[^/]*)")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, user_id):
|
def on_GET(self, request, user_id):
|
||||||
|
|
|
@ -27,7 +27,7 @@ import logging
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def client_path_pattern(path_regex):
|
def client_path_patterns(path_regex, releases=(0,), include_in_unstable=True):
|
||||||
"""Creates a regex compiled client path with the correct client path
|
"""Creates a regex compiled client path with the correct client path
|
||||||
prefix.
|
prefix.
|
||||||
|
|
||||||
|
@ -37,7 +37,14 @@ def client_path_pattern(path_regex):
|
||||||
Returns:
|
Returns:
|
||||||
SRE_Pattern
|
SRE_Pattern
|
||||||
"""
|
"""
|
||||||
return re.compile("^" + CLIENT_PREFIX + path_regex)
|
patterns = [re.compile("^" + CLIENT_PREFIX + path_regex)]
|
||||||
|
if include_in_unstable:
|
||||||
|
unstable_prefix = CLIENT_PREFIX.replace("/api/v1", "/unstable")
|
||||||
|
patterns.append(re.compile("^" + unstable_prefix + path_regex))
|
||||||
|
for release in releases:
|
||||||
|
new_prefix = CLIENT_PREFIX.replace("/api/v1", "/r%d" % release)
|
||||||
|
patterns.append(re.compile("^" + new_prefix + path_regex))
|
||||||
|
return patterns
|
||||||
|
|
||||||
|
|
||||||
class ClientV1RestServlet(RestServlet):
|
class ClientV1RestServlet(RestServlet):
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import AuthError, SynapseError, Codes
|
from synapse.api.errors import AuthError, SynapseError, Codes
|
||||||
from synapse.types import RoomAlias
|
from synapse.types import RoomAlias
|
||||||
from .base import ClientV1RestServlet, client_path_pattern
|
from .base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
import logging
|
import logging
|
||||||
|
@ -32,7 +32,7 @@ def register_servlets(hs, http_server):
|
||||||
|
|
||||||
|
|
||||||
class ClientDirectoryServer(ClientV1RestServlet):
|
class ClientDirectoryServer(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/directory/room/(?P<room_alias>[^/]*)$")
|
PATTERNS = client_path_patterns("/directory/room/(?P<room_alias>[^/]*)$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, room_alias):
|
def on_GET(self, request, room_alias):
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from .base import ClientV1RestServlet, client_path_pattern
|
from .base import ClientV1RestServlet, client_path_patterns
|
||||||
from synapse.events.utils import serialize_event
|
from synapse.events.utils import serialize_event
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class EventStreamRestServlet(ClientV1RestServlet):
|
class EventStreamRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/events$")
|
PATTERNS = client_path_patterns("/events$")
|
||||||
|
|
||||||
DEFAULT_LONGPOLL_TIME_MS = 30000
|
DEFAULT_LONGPOLL_TIME_MS = 30000
|
||||||
|
|
||||||
|
@ -72,7 +72,7 @@ class EventStreamRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO: Unit test gets, with and without auth, with different kinds of events.
|
# TODO: Unit test gets, with and without auth, with different kinds of events.
|
||||||
class EventRestServlet(ClientV1RestServlet):
|
class EventRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/events/(?P<event_id>[^/]*)$")
|
PATTERNS = client_path_patterns("/events/(?P<event_id>[^/]*)$")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(EventRestServlet, self).__init__(hs)
|
super(EventRestServlet, self).__init__(hs)
|
||||||
|
|
|
@ -16,12 +16,12 @@
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from base import ClientV1RestServlet, client_path_pattern
|
from base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
|
|
||||||
# TODO: Needs unit testing
|
# TODO: Needs unit testing
|
||||||
class InitialSyncRestServlet(ClientV1RestServlet):
|
class InitialSyncRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/initialSync$")
|
PATTERNS = client_path_patterns("/initialSync$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request):
|
def on_GET(self, request):
|
||||||
|
|
|
@ -16,9 +16,8 @@
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError, LoginError, Codes
|
from synapse.api.errors import SynapseError, LoginError, Codes
|
||||||
from synapse.http.client import SimpleHttpClient
|
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
from base import ClientV1RestServlet, client_path_pattern
|
from base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
import urllib
|
import urllib
|
||||||
|
@ -36,7 +35,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class LoginRestServlet(ClientV1RestServlet):
|
class LoginRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/login$")
|
PATTERNS = client_path_patterns("/login$")
|
||||||
PASS_TYPE = "m.login.password"
|
PASS_TYPE = "m.login.password"
|
||||||
SAML2_TYPE = "m.login.saml2"
|
SAML2_TYPE = "m.login.saml2"
|
||||||
CAS_TYPE = "m.login.cas"
|
CAS_TYPE = "m.login.cas"
|
||||||
|
@ -51,6 +50,7 @@ class LoginRestServlet(ClientV1RestServlet):
|
||||||
self.cas_server_url = hs.config.cas_server_url
|
self.cas_server_url = hs.config.cas_server_url
|
||||||
self.cas_required_attributes = hs.config.cas_required_attributes
|
self.cas_required_attributes = hs.config.cas_required_attributes
|
||||||
self.servername = hs.config.server_name
|
self.servername = hs.config.server_name
|
||||||
|
self.http_client = hs.get_simple_http_client()
|
||||||
|
|
||||||
def on_GET(self, request):
|
def on_GET(self, request):
|
||||||
flows = []
|
flows = []
|
||||||
|
@ -98,15 +98,12 @@ class LoginRestServlet(ClientV1RestServlet):
|
||||||
# TODO Delete this after all CAS clients switch to token login instead
|
# TODO Delete this after all CAS clients switch to token login instead
|
||||||
elif self.cas_enabled and (login_submission["type"] ==
|
elif self.cas_enabled and (login_submission["type"] ==
|
||||||
LoginRestServlet.CAS_TYPE):
|
LoginRestServlet.CAS_TYPE):
|
||||||
# TODO: get this from the homeserver rather than creating a new one for
|
|
||||||
# each request
|
|
||||||
http_client = SimpleHttpClient(self.hs)
|
|
||||||
uri = "%s/proxyValidate" % (self.cas_server_url,)
|
uri = "%s/proxyValidate" % (self.cas_server_url,)
|
||||||
args = {
|
args = {
|
||||||
"ticket": login_submission["ticket"],
|
"ticket": login_submission["ticket"],
|
||||||
"service": login_submission["service"]
|
"service": login_submission["service"]
|
||||||
}
|
}
|
||||||
body = yield http_client.get_raw(uri, args)
|
body = yield self.http_client.get_raw(uri, args)
|
||||||
result = yield self.do_cas_login(body)
|
result = yield self.do_cas_login(body)
|
||||||
defer.returnValue(result)
|
defer.returnValue(result)
|
||||||
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
|
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
|
||||||
|
@ -238,7 +235,7 @@ class LoginRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class SAML2RestServlet(ClientV1RestServlet):
|
class SAML2RestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/login/saml2")
|
PATTERNS = client_path_patterns("/login/saml2", releases=())
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(SAML2RestServlet, self).__init__(hs)
|
super(SAML2RestServlet, self).__init__(hs)
|
||||||
|
@ -282,7 +279,7 @@ class SAML2RestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO Delete this after all CAS clients switch to token login instead
|
# TODO Delete this after all CAS clients switch to token login instead
|
||||||
class CasRestServlet(ClientV1RestServlet):
|
class CasRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/login/cas")
|
PATTERNS = client_path_patterns("/login/cas", releases=())
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(CasRestServlet, self).__init__(hs)
|
super(CasRestServlet, self).__init__(hs)
|
||||||
|
@ -293,7 +290,7 @@ class CasRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class CasRedirectServlet(ClientV1RestServlet):
|
class CasRedirectServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/login/cas/redirect")
|
PATTERNS = client_path_patterns("/login/cas/redirect", releases=())
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(CasRedirectServlet, self).__init__(hs)
|
super(CasRedirectServlet, self).__init__(hs)
|
||||||
|
@ -316,7 +313,7 @@ class CasRedirectServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class CasTicketServlet(ClientV1RestServlet):
|
class CasTicketServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/login/cas/ticket")
|
PATTERNS = client_path_patterns("/login/cas/ticket", releases=())
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(CasTicketServlet, self).__init__(hs)
|
super(CasTicketServlet, self).__init__(hs)
|
||||||
|
|
|
@ -19,7 +19,7 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
from .base import ClientV1RestServlet, client_path_pattern
|
from .base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
import logging
|
import logging
|
||||||
|
@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PresenceStatusRestServlet(ClientV1RestServlet):
|
class PresenceStatusRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/presence/(?P<user_id>[^/]*)/status")
|
PATTERNS = client_path_patterns("/presence/(?P<user_id>[^/]*)/status")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, user_id):
|
def on_GET(self, request, user_id):
|
||||||
|
@ -73,7 +73,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class PresenceListRestServlet(ClientV1RestServlet):
|
class PresenceListRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/presence/list/(?P<user_id>[^/]*)")
|
PATTERNS = client_path_patterns("/presence/list/(?P<user_id>[^/]*)")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, user_id):
|
def on_GET(self, request, user_id):
|
||||||
|
@ -120,7 +120,7 @@ class PresenceListRestServlet(ClientV1RestServlet):
|
||||||
if len(u) == 0:
|
if len(u) == 0:
|
||||||
continue
|
continue
|
||||||
invited_user = UserID.from_string(u)
|
invited_user = UserID.from_string(u)
|
||||||
yield self.handlers.presence_handler.send_invite(
|
yield self.handlers.presence_handler.send_presence_invite(
|
||||||
observer_user=user, observed_user=invited_user
|
observer_user=user, observed_user=invited_user
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -16,14 +16,14 @@
|
||||||
""" This module contains REST servlets to do with profile: /profile/<paths> """
|
""" This module contains REST servlets to do with profile: /profile/<paths> """
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from .base import ClientV1RestServlet, client_path_pattern
|
from .base import ClientV1RestServlet, client_path_patterns
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
|
|
||||||
|
|
||||||
class ProfileDisplaynameRestServlet(ClientV1RestServlet):
|
class ProfileDisplaynameRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/displayname")
|
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/displayname")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, user_id):
|
def on_GET(self, request, user_id):
|
||||||
|
@ -56,7 +56,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class ProfileAvatarURLRestServlet(ClientV1RestServlet):
|
class ProfileAvatarURLRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)/avatar_url")
|
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/avatar_url")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, user_id):
|
def on_GET(self, request, user_id):
|
||||||
|
@ -89,7 +89,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class ProfileRestServlet(ClientV1RestServlet):
|
class ProfileRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/profile/(?P<user_id>[^/]*)")
|
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, user_id):
|
def on_GET(self, request, user_id):
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
from synapse.api.errors import (
|
from synapse.api.errors import (
|
||||||
SynapseError, Codes, UnrecognizedRequestError, NotFoundError, StoreError
|
SynapseError, Codes, UnrecognizedRequestError, NotFoundError, StoreError
|
||||||
)
|
)
|
||||||
from .base import ClientV1RestServlet, client_path_pattern
|
from .base import ClientV1RestServlet, client_path_patterns
|
||||||
from synapse.storage.push_rule import (
|
from synapse.storage.push_rule import (
|
||||||
InconsistentRuleException, RuleNotFoundException
|
InconsistentRuleException, RuleNotFoundException
|
||||||
)
|
)
|
||||||
|
@ -31,7 +31,7 @@ import simplejson as json
|
||||||
|
|
||||||
|
|
||||||
class PushRuleRestServlet(ClientV1RestServlet):
|
class PushRuleRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/pushrules/.*$")
|
PATTERNS = client_path_patterns("/pushrules/.*$")
|
||||||
SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
|
SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR = (
|
||||||
"Unrecognised request: You probably wanted a trailing slash")
|
"Unrecognised request: You probably wanted a trailing slash")
|
||||||
|
|
||||||
|
@ -207,7 +207,12 @@ class PushRuleRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
def set_rule_attr(self, user_name, spec, val):
|
def set_rule_attr(self, user_name, spec, val):
|
||||||
if spec['attr'] == 'enabled':
|
if spec['attr'] == 'enabled':
|
||||||
|
if isinstance(val, dict) and "enabled" in val:
|
||||||
|
val = val["enabled"]
|
||||||
if not isinstance(val, bool):
|
if not isinstance(val, bool):
|
||||||
|
# Legacy fallback
|
||||||
|
# This should *actually* take a dict, but many clients pass
|
||||||
|
# bools directly, so let's not break them.
|
||||||
raise SynapseError(400, "Value for 'enabled' must be boolean")
|
raise SynapseError(400, "Value for 'enabled' must be boolean")
|
||||||
namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
|
namespaced_rule_id = _namespaced_rule_id_from_spec(spec)
|
||||||
self.hs.get_datastore().set_push_rule_enabled(
|
self.hs.get_datastore().set_push_rule_enabled(
|
||||||
|
|
|
@ -17,13 +17,16 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError, Codes
|
from synapse.api.errors import SynapseError, Codes
|
||||||
from synapse.push import PusherConfigException
|
from synapse.push import PusherConfigException
|
||||||
from .base import ClientV1RestServlet, client_path_pattern
|
from .base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PusherRestServlet(ClientV1RestServlet):
|
class PusherRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/pushers/set$")
|
PATTERNS = client_path_patterns("/pushers/set$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, request):
|
def on_POST(self, request):
|
||||||
|
@ -51,6 +54,9 @@ class PusherRestServlet(ClientV1RestServlet):
|
||||||
raise SynapseError(400, "Missing parameters: "+','.join(missing),
|
raise SynapseError(400, "Missing parameters: "+','.join(missing),
|
||||||
errcode=Codes.MISSING_PARAM)
|
errcode=Codes.MISSING_PARAM)
|
||||||
|
|
||||||
|
logger.debug("set pushkey %s to kind %s", content['pushkey'], content['kind'])
|
||||||
|
logger.debug("Got pushers request with body: %r", content)
|
||||||
|
|
||||||
append = False
|
append = False
|
||||||
if 'append' in content:
|
if 'append' in content:
|
||||||
append = content['append']
|
append = content['append']
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError, Codes
|
from synapse.api.errors import SynapseError, Codes
|
||||||
from synapse.api.constants import LoginType
|
from synapse.api.constants import LoginType
|
||||||
from base import ClientV1RestServlet, client_path_pattern
|
from base import ClientV1RestServlet, client_path_patterns
|
||||||
import synapse.util.stringutils as stringutils
|
import synapse.util.stringutils as stringutils
|
||||||
|
|
||||||
from synapse.util.async import run_on_reactor
|
from synapse.util.async import run_on_reactor
|
||||||
|
@ -48,7 +48,7 @@ class RegisterRestServlet(ClientV1RestServlet):
|
||||||
handler doesn't have a concept of multi-stages or sessions.
|
handler doesn't have a concept of multi-stages or sessions.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
PATTERN = client_path_pattern("/register$")
|
PATTERNS = client_path_patterns("/register$", releases=(), include_in_unstable=False)
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(RegisterRestServlet, self).__init__(hs)
|
super(RegisterRestServlet, self).__init__(hs)
|
||||||
|
|
|
@ -16,7 +16,7 @@
|
||||||
""" This module contains REST servlets to do with rooms: /rooms/<paths> """
|
""" This module contains REST servlets to do with rooms: /rooms/<paths> """
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from base import ClientV1RestServlet, client_path_pattern
|
from base import ClientV1RestServlet, client_path_patterns
|
||||||
from synapse.api.errors import SynapseError, Codes, AuthError
|
from synapse.api.errors import SynapseError, Codes, AuthError
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from synapse.api.constants import EventTypes, Membership
|
from synapse.api.constants import EventTypes, Membership
|
||||||
|
@ -34,16 +34,16 @@ class RoomCreateRestServlet(ClientV1RestServlet):
|
||||||
# No PATTERN; we have custom dispatch rules here
|
# No PATTERN; we have custom dispatch rules here
|
||||||
|
|
||||||
def register(self, http_server):
|
def register(self, http_server):
|
||||||
PATTERN = "/createRoom"
|
PATTERNS = "/createRoom"
|
||||||
register_txn_path(self, PATTERN, http_server)
|
register_txn_path(self, PATTERNS, http_server)
|
||||||
# define CORS for all of /rooms in RoomCreateRestServlet for simplicity
|
# define CORS for all of /rooms in RoomCreateRestServlet for simplicity
|
||||||
http_server.register_path("OPTIONS",
|
http_server.register_paths("OPTIONS",
|
||||||
client_path_pattern("/rooms(?:/.*)?$"),
|
client_path_patterns("/rooms(?:/.*)?$"),
|
||||||
self.on_OPTIONS)
|
self.on_OPTIONS)
|
||||||
# define CORS for /createRoom[/txnid]
|
# define CORS for /createRoom[/txnid]
|
||||||
http_server.register_path("OPTIONS",
|
http_server.register_paths("OPTIONS",
|
||||||
client_path_pattern("/createRoom(?:/.*)?$"),
|
client_path_patterns("/createRoom(?:/.*)?$"),
|
||||||
self.on_OPTIONS)
|
self.on_OPTIONS)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_PUT(self, request, txn_id):
|
def on_PUT(self, request, txn_id):
|
||||||
|
@ -103,18 +103,18 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
|
||||||
state_key = ("/rooms/(?P<room_id>[^/]*)/state/"
|
state_key = ("/rooms/(?P<room_id>[^/]*)/state/"
|
||||||
"(?P<event_type>[^/]*)/(?P<state_key>[^/]*)$")
|
"(?P<event_type>[^/]*)/(?P<state_key>[^/]*)$")
|
||||||
|
|
||||||
http_server.register_path("GET",
|
http_server.register_paths("GET",
|
||||||
client_path_pattern(state_key),
|
client_path_patterns(state_key),
|
||||||
self.on_GET)
|
self.on_GET)
|
||||||
http_server.register_path("PUT",
|
http_server.register_paths("PUT",
|
||||||
client_path_pattern(state_key),
|
client_path_patterns(state_key),
|
||||||
self.on_PUT)
|
self.on_PUT)
|
||||||
http_server.register_path("GET",
|
http_server.register_paths("GET",
|
||||||
client_path_pattern(no_state_key),
|
client_path_patterns(no_state_key),
|
||||||
self.on_GET_no_state_key)
|
self.on_GET_no_state_key)
|
||||||
http_server.register_path("PUT",
|
http_server.register_paths("PUT",
|
||||||
client_path_pattern(no_state_key),
|
client_path_patterns(no_state_key),
|
||||||
self.on_PUT_no_state_key)
|
self.on_PUT_no_state_key)
|
||||||
|
|
||||||
def on_GET_no_state_key(self, request, room_id, event_type):
|
def on_GET_no_state_key(self, request, room_id, event_type):
|
||||||
return self.on_GET(request, room_id, event_type, "")
|
return self.on_GET(request, room_id, event_type, "")
|
||||||
|
@ -170,8 +170,8 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
def register(self, http_server):
|
def register(self, http_server):
|
||||||
# /rooms/$roomid/send/$event_type[/$txn_id]
|
# /rooms/$roomid/send/$event_type[/$txn_id]
|
||||||
PATTERN = ("/rooms/(?P<room_id>[^/]*)/send/(?P<event_type>[^/]*)")
|
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/send/(?P<event_type>[^/]*)")
|
||||||
register_txn_path(self, PATTERN, http_server, with_get=True)
|
register_txn_path(self, PATTERNS, http_server, with_get=True)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, request, room_id, event_type, txn_id=None):
|
def on_POST(self, request, room_id, event_type, txn_id=None):
|
||||||
|
@ -215,8 +215,8 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
def register(self, http_server):
|
def register(self, http_server):
|
||||||
# /join/$room_identifier[/$txn_id]
|
# /join/$room_identifier[/$txn_id]
|
||||||
PATTERN = ("/join/(?P<room_identifier>[^/]*)")
|
PATTERNS = ("/join/(?P<room_identifier>[^/]*)")
|
||||||
register_txn_path(self, PATTERN, http_server)
|
register_txn_path(self, PATTERNS, http_server)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, request, room_identifier, txn_id=None):
|
def on_POST(self, request, room_identifier, txn_id=None):
|
||||||
|
@ -280,7 +280,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO: Needs unit testing
|
# TODO: Needs unit testing
|
||||||
class PublicRoomListRestServlet(ClientV1RestServlet):
|
class PublicRoomListRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/publicRooms$")
|
PATTERNS = client_path_patterns("/publicRooms$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request):
|
def on_GET(self, request):
|
||||||
|
@ -291,7 +291,7 @@ class PublicRoomListRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO: Needs unit testing
|
# TODO: Needs unit testing
|
||||||
class RoomMemberListRestServlet(ClientV1RestServlet):
|
class RoomMemberListRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/members$")
|
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/members$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, room_id):
|
def on_GET(self, request, room_id):
|
||||||
|
@ -328,7 +328,7 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO: Needs better unit testing
|
# TODO: Needs better unit testing
|
||||||
class RoomMessageListRestServlet(ClientV1RestServlet):
|
class RoomMessageListRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/messages$")
|
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/messages$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, room_id):
|
def on_GET(self, request, room_id):
|
||||||
|
@ -351,7 +351,7 @@ class RoomMessageListRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO: Needs unit testing
|
# TODO: Needs unit testing
|
||||||
class RoomStateRestServlet(ClientV1RestServlet):
|
class RoomStateRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/state$")
|
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/state$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, room_id):
|
def on_GET(self, request, room_id):
|
||||||
|
@ -368,7 +368,7 @@ class RoomStateRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
# TODO: Needs unit testing
|
# TODO: Needs unit testing
|
||||||
class RoomInitialSyncRestServlet(ClientV1RestServlet):
|
class RoomInitialSyncRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/initialSync$")
|
PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/initialSync$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request, room_id):
|
def on_GET(self, request, room_id):
|
||||||
|
@ -383,32 +383,8 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):
|
||||||
defer.returnValue((200, content))
|
defer.returnValue((200, content))
|
||||||
|
|
||||||
|
|
||||||
class RoomTriggerBackfill(ClientV1RestServlet):
|
|
||||||
PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/backfill$")
|
|
||||||
|
|
||||||
def __init__(self, hs):
|
|
||||||
super(RoomTriggerBackfill, self).__init__(hs)
|
|
||||||
self.clock = hs.get_clock()
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_GET(self, request, room_id):
|
|
||||||
remote_server = urllib.unquote(
|
|
||||||
request.args["remote"][0]
|
|
||||||
).decode("UTF-8")
|
|
||||||
|
|
||||||
limit = int(request.args["limit"][0])
|
|
||||||
|
|
||||||
handler = self.handlers.federation_handler
|
|
||||||
events = yield handler.backfill(remote_server, room_id, limit)
|
|
||||||
|
|
||||||
time_now = self.clock.time_msec()
|
|
||||||
|
|
||||||
res = [serialize_event(event, time_now) for event in events]
|
|
||||||
defer.returnValue((200, res))
|
|
||||||
|
|
||||||
|
|
||||||
class RoomEventContext(ClientV1RestServlet):
|
class RoomEventContext(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern(
|
PATTERNS = client_path_patterns(
|
||||||
"/rooms/(?P<room_id>[^/]*)/context/(?P<event_id>[^/]*)$"
|
"/rooms/(?P<room_id>[^/]*)/context/(?P<event_id>[^/]*)$"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -447,9 +423,9 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
def register(self, http_server):
|
def register(self, http_server):
|
||||||
# /rooms/$roomid/[invite|join|leave]
|
# /rooms/$roomid/[invite|join|leave]
|
||||||
PATTERN = ("/rooms/(?P<room_id>[^/]*)/"
|
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/"
|
||||||
"(?P<membership_action>join|invite|leave|ban|kick)")
|
"(?P<membership_action>join|invite|leave|ban|kick|forget)")
|
||||||
register_txn_path(self, PATTERN, http_server)
|
register_txn_path(self, PATTERNS, http_server)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, request, room_id, membership_action, txn_id=None):
|
def on_POST(self, request, room_id, membership_action, txn_id=None):
|
||||||
|
@ -458,6 +434,8 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
|
||||||
allow_guest=True
|
allow_guest=True
|
||||||
)
|
)
|
||||||
|
|
||||||
|
effective_membership_action = membership_action
|
||||||
|
|
||||||
if is_guest and membership_action not in {Membership.JOIN, Membership.LEAVE}:
|
if is_guest and membership_action not in {Membership.JOIN, Membership.LEAVE}:
|
||||||
raise AuthError(403, "Guest access not allowed")
|
raise AuthError(403, "Guest access not allowed")
|
||||||
|
|
||||||
|
@ -488,11 +466,13 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
|
||||||
UserID.from_string(state_key)
|
UserID.from_string(state_key)
|
||||||
|
|
||||||
if membership_action == "kick":
|
if membership_action == "kick":
|
||||||
membership_action = "leave"
|
effective_membership_action = "leave"
|
||||||
|
elif membership_action == "forget":
|
||||||
|
effective_membership_action = "leave"
|
||||||
|
|
||||||
msg_handler = self.handlers.message_handler
|
msg_handler = self.handlers.message_handler
|
||||||
|
|
||||||
content = {"membership": unicode(membership_action)}
|
content = {"membership": unicode(effective_membership_action)}
|
||||||
if is_guest:
|
if is_guest:
|
||||||
content["kind"] = "guest"
|
content["kind"] = "guest"
|
||||||
|
|
||||||
|
@ -509,6 +489,9 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
|
||||||
is_guest=is_guest,
|
is_guest=is_guest,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if membership_action == "forget":
|
||||||
|
yield self.handlers.room_member_handler.forget(user, room_id)
|
||||||
|
|
||||||
defer.returnValue((200, {}))
|
defer.returnValue((200, {}))
|
||||||
|
|
||||||
def _has_3pid_invite_keys(self, content):
|
def _has_3pid_invite_keys(self, content):
|
||||||
|
@ -536,8 +519,8 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
class RoomRedactEventRestServlet(ClientV1RestServlet):
|
class RoomRedactEventRestServlet(ClientV1RestServlet):
|
||||||
def register(self, http_server):
|
def register(self, http_server):
|
||||||
PATTERN = ("/rooms/(?P<room_id>[^/]*)/redact/(?P<event_id>[^/]*)")
|
PATTERNS = ("/rooms/(?P<room_id>[^/]*)/redact/(?P<event_id>[^/]*)")
|
||||||
register_txn_path(self, PATTERN, http_server)
|
register_txn_path(self, PATTERNS, http_server)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, request, room_id, event_id, txn_id=None):
|
def on_POST(self, request, room_id, event_id, txn_id=None):
|
||||||
|
@ -575,7 +558,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class RoomTypingRestServlet(ClientV1RestServlet):
|
class RoomTypingRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern(
|
PATTERNS = client_path_patterns(
|
||||||
"/rooms/(?P<room_id>[^/]*)/typing/(?P<user_id>[^/]*)$"
|
"/rooms/(?P<room_id>[^/]*)/typing/(?P<user_id>[^/]*)$"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -608,7 +591,7 @@ class RoomTypingRestServlet(ClientV1RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class SearchRestServlet(ClientV1RestServlet):
|
class SearchRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern(
|
PATTERNS = client_path_patterns(
|
||||||
"/search$"
|
"/search$"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -648,20 +631,20 @@ def register_txn_path(servlet, regex_string, http_server, with_get=False):
|
||||||
http_server : The http_server to register paths with.
|
http_server : The http_server to register paths with.
|
||||||
with_get: True to also register respective GET paths for the PUTs.
|
with_get: True to also register respective GET paths for the PUTs.
|
||||||
"""
|
"""
|
||||||
http_server.register_path(
|
http_server.register_paths(
|
||||||
"POST",
|
"POST",
|
||||||
client_path_pattern(regex_string + "$"),
|
client_path_patterns(regex_string + "$"),
|
||||||
servlet.on_POST
|
servlet.on_POST
|
||||||
)
|
)
|
||||||
http_server.register_path(
|
http_server.register_paths(
|
||||||
"PUT",
|
"PUT",
|
||||||
client_path_pattern(regex_string + "/(?P<txn_id>[^/]*)$"),
|
client_path_patterns(regex_string + "/(?P<txn_id>[^/]*)$"),
|
||||||
servlet.on_PUT
|
servlet.on_PUT
|
||||||
)
|
)
|
||||||
if with_get:
|
if with_get:
|
||||||
http_server.register_path(
|
http_server.register_paths(
|
||||||
"GET",
|
"GET",
|
||||||
client_path_pattern(regex_string + "/(?P<txn_id>[^/]*)$"),
|
client_path_patterns(regex_string + "/(?P<txn_id>[^/]*)$"),
|
||||||
servlet.on_GET
|
servlet.on_GET
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -672,7 +655,6 @@ def register_servlets(hs, http_server):
|
||||||
RoomMemberListRestServlet(hs).register(http_server)
|
RoomMemberListRestServlet(hs).register(http_server)
|
||||||
RoomMessageListRestServlet(hs).register(http_server)
|
RoomMessageListRestServlet(hs).register(http_server)
|
||||||
JoinRoomAliasServlet(hs).register(http_server)
|
JoinRoomAliasServlet(hs).register(http_server)
|
||||||
RoomTriggerBackfill(hs).register(http_server)
|
|
||||||
RoomMembershipRestServlet(hs).register(http_server)
|
RoomMembershipRestServlet(hs).register(http_server)
|
||||||
RoomSendEventRestServlet(hs).register(http_server)
|
RoomSendEventRestServlet(hs).register(http_server)
|
||||||
PublicRoomListRestServlet(hs).register(http_server)
|
PublicRoomListRestServlet(hs).register(http_server)
|
||||||
|
|
|
@ -15,7 +15,7 @@
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from base import ClientV1RestServlet, client_path_pattern
|
from base import ClientV1RestServlet, client_path_patterns
|
||||||
|
|
||||||
|
|
||||||
import hmac
|
import hmac
|
||||||
|
@ -24,7 +24,7 @@ import base64
|
||||||
|
|
||||||
|
|
||||||
class VoipRestServlet(ClientV1RestServlet):
|
class VoipRestServlet(ClientV1RestServlet):
|
||||||
PATTERN = client_path_pattern("/voip/turnServer$")
|
PATTERNS = client_path_patterns("/voip/turnServer$")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request):
|
def on_GET(self, request):
|
||||||
|
|
|
@ -12,37 +12,3 @@
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from . import (
|
|
||||||
sync,
|
|
||||||
filter,
|
|
||||||
account,
|
|
||||||
register,
|
|
||||||
auth,
|
|
||||||
receipts,
|
|
||||||
keys,
|
|
||||||
tokenrefresh,
|
|
||||||
tags,
|
|
||||||
)
|
|
||||||
|
|
||||||
from synapse.http.server import JsonResource
|
|
||||||
|
|
||||||
|
|
||||||
class ClientV2AlphaRestResource(JsonResource):
|
|
||||||
"""A resource for version 2 alpha of the matrix client API."""
|
|
||||||
|
|
||||||
def __init__(self, hs):
|
|
||||||
JsonResource.__init__(self, hs, canonical_json=False)
|
|
||||||
self.register_servlets(self, hs)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def register_servlets(client_resource, hs):
|
|
||||||
sync.register_servlets(hs, client_resource)
|
|
||||||
filter.register_servlets(hs, client_resource)
|
|
||||||
account.register_servlets(hs, client_resource)
|
|
||||||
register.register_servlets(hs, client_resource)
|
|
||||||
auth.register_servlets(hs, client_resource)
|
|
||||||
receipts.register_servlets(hs, client_resource)
|
|
||||||
keys.register_servlets(hs, client_resource)
|
|
||||||
tokenrefresh.register_servlets(hs, client_resource)
|
|
||||||
tags.register_servlets(hs, client_resource)
|
|
||||||
|
|
|
@ -27,7 +27,7 @@ import simplejson
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def client_v2_pattern(path_regex):
|
def client_v2_patterns(path_regex, releases=(0,)):
|
||||||
"""Creates a regex compiled client path with the correct client path
|
"""Creates a regex compiled client path with the correct client path
|
||||||
prefix.
|
prefix.
|
||||||
|
|
||||||
|
@ -37,7 +37,13 @@ def client_v2_pattern(path_regex):
|
||||||
Returns:
|
Returns:
|
||||||
SRE_Pattern
|
SRE_Pattern
|
||||||
"""
|
"""
|
||||||
return re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)
|
patterns = [re.compile("^" + CLIENT_V2_ALPHA_PREFIX + path_regex)]
|
||||||
|
unstable_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/unstable")
|
||||||
|
patterns.append(re.compile("^" + unstable_prefix + path_regex))
|
||||||
|
for release in releases:
|
||||||
|
new_prefix = CLIENT_V2_ALPHA_PREFIX.replace("/v2_alpha", "/r%d" % release)
|
||||||
|
patterns.append(re.compile("^" + new_prefix + path_regex))
|
||||||
|
return patterns
|
||||||
|
|
||||||
|
|
||||||
def parse_request_allow_empty(request):
|
def parse_request_allow_empty(request):
|
||||||
|
|
|
@ -20,7 +20,7 @@ from synapse.api.errors import LoginError, SynapseError, Codes
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
from synapse.util.async import run_on_reactor
|
from synapse.util.async import run_on_reactor
|
||||||
|
|
||||||
from ._base import client_v2_pattern, parse_json_dict_from_request
|
from ._base import client_v2_patterns, parse_json_dict_from_request
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PasswordRestServlet(RestServlet):
|
class PasswordRestServlet(RestServlet):
|
||||||
PATTERN = client_v2_pattern("/account/password")
|
PATTERNS = client_v2_patterns("/account/password")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(PasswordRestServlet, self).__init__()
|
super(PasswordRestServlet, self).__init__()
|
||||||
|
@ -89,7 +89,7 @@ class PasswordRestServlet(RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class ThreepidRestServlet(RestServlet):
|
class ThreepidRestServlet(RestServlet):
|
||||||
PATTERN = client_v2_pattern("/account/3pid")
|
PATTERNS = client_v2_patterns("/account/3pid")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(ThreepidRestServlet, self).__init__()
|
super(ThreepidRestServlet, self).__init__()
|
||||||
|
|
111
synapse/rest/client/v2_alpha/account_data.py
Normal file
111
synapse/rest/client/v2_alpha/account_data.py
Normal file
|
@ -0,0 +1,111 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
|
from synapse.http.servlet import RestServlet
|
||||||
|
from synapse.api.errors import AuthError, SynapseError
|
||||||
|
|
||||||
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import simplejson as json
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AccountDataServlet(RestServlet):
|
||||||
|
"""
|
||||||
|
PUT /user/{user_id}/account_data/{account_dataType} HTTP/1.1
|
||||||
|
"""
|
||||||
|
PATTERNS = client_v2_patterns(
|
||||||
|
"/user/(?P<user_id>[^/]*)/account_data/(?P<account_data_type>[^/]*)"
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, hs):
|
||||||
|
super(AccountDataServlet, self).__init__()
|
||||||
|
self.auth = hs.get_auth()
|
||||||
|
self.store = hs.get_datastore()
|
||||||
|
self.notifier = hs.get_notifier()
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def on_PUT(self, request, user_id, account_data_type):
|
||||||
|
auth_user, _, _ = yield self.auth.get_user_by_req(request)
|
||||||
|
if user_id != auth_user.to_string():
|
||||||
|
raise AuthError(403, "Cannot add account data for other users.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
content_bytes = request.content.read()
|
||||||
|
body = json.loads(content_bytes)
|
||||||
|
except:
|
||||||
|
raise SynapseError(400, "Invalid JSON")
|
||||||
|
|
||||||
|
max_id = yield self.store.add_account_data_for_user(
|
||||||
|
user_id, account_data_type, body
|
||||||
|
)
|
||||||
|
|
||||||
|
yield self.notifier.on_new_event(
|
||||||
|
"account_data_key", max_id, users=[user_id]
|
||||||
|
)
|
||||||
|
|
||||||
|
defer.returnValue((200, {}))
|
||||||
|
|
||||||
|
|
||||||
|
class RoomAccountDataServlet(RestServlet):
|
||||||
|
"""
|
||||||
|
PUT /user/{user_id}/rooms/{room_id}/account_data/{account_dataType} HTTP/1.1
|
||||||
|
"""
|
||||||
|
PATTERNS = client_v2_patterns(
|
||||||
|
"/user/(?P<user_id>[^/]*)"
|
||||||
|
"/rooms/(?P<room_id>[^/]*)"
|
||||||
|
"/account_data/(?P<account_data_type>[^/]*)"
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, hs):
|
||||||
|
super(RoomAccountDataServlet, self).__init__()
|
||||||
|
self.auth = hs.get_auth()
|
||||||
|
self.store = hs.get_datastore()
|
||||||
|
self.notifier = hs.get_notifier()
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def on_PUT(self, request, user_id, room_id, account_data_type):
|
||||||
|
auth_user, _, _ = yield self.auth.get_user_by_req(request)
|
||||||
|
if user_id != auth_user.to_string():
|
||||||
|
raise AuthError(403, "Cannot add account data for other users.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
content_bytes = request.content.read()
|
||||||
|
body = json.loads(content_bytes)
|
||||||
|
except:
|
||||||
|
raise SynapseError(400, "Invalid JSON")
|
||||||
|
|
||||||
|
if not isinstance(body, dict):
|
||||||
|
raise ValueError("Expected a JSON object")
|
||||||
|
|
||||||
|
max_id = yield self.store.add_account_data_to_room(
|
||||||
|
user_id, room_id, account_data_type, body
|
||||||
|
)
|
||||||
|
|
||||||
|
yield self.notifier.on_new_event(
|
||||||
|
"account_data_key", max_id, users=[user_id]
|
||||||
|
)
|
||||||
|
|
||||||
|
defer.returnValue((200, {}))
|
||||||
|
|
||||||
|
|
||||||
|
def register_servlets(hs, http_server):
|
||||||
|
AccountDataServlet(hs).register(http_server)
|
||||||
|
RoomAccountDataServlet(hs).register(http_server)
|
|
@ -20,7 +20,7 @@ from synapse.api.errors import SynapseError
|
||||||
from synapse.api.urls import CLIENT_V2_ALPHA_PREFIX
|
from synapse.api.urls import CLIENT_V2_ALPHA_PREFIX
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
|
|
||||||
from ._base import client_v2_pattern
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -97,7 +97,7 @@ class AuthRestServlet(RestServlet):
|
||||||
cannot be handled in the normal flow (with requests to the same endpoint).
|
cannot be handled in the normal flow (with requests to the same endpoint).
|
||||||
Current use is for web fallback auth.
|
Current use is for web fallback auth.
|
||||||
"""
|
"""
|
||||||
PATTERN = client_v2_pattern("/auth/(?P<stagetype>[\w\.]*)/fallback/web")
|
PATTERNS = client_v2_patterns("/auth/(?P<stagetype>[\w\.]*)/fallback/web")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(AuthRestServlet, self).__init__()
|
super(AuthRestServlet, self).__init__()
|
||||||
|
|
|
@ -19,7 +19,7 @@ from synapse.api.errors import AuthError, SynapseError
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
|
|
||||||
from ._base import client_v2_pattern
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
import logging
|
import logging
|
||||||
|
@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class GetFilterRestServlet(RestServlet):
|
class GetFilterRestServlet(RestServlet):
|
||||||
PATTERN = client_v2_pattern("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
|
PATTERNS = client_v2_patterns("/user/(?P<user_id>[^/]*)/filter/(?P<filter_id>[^/]*)")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(GetFilterRestServlet, self).__init__()
|
super(GetFilterRestServlet, self).__init__()
|
||||||
|
@ -65,7 +65,7 @@ class GetFilterRestServlet(RestServlet):
|
||||||
|
|
||||||
|
|
||||||
class CreateFilterRestServlet(RestServlet):
|
class CreateFilterRestServlet(RestServlet):
|
||||||
PATTERN = client_v2_pattern("/user/(?P<user_id>[^/]*)/filter")
|
PATTERNS = client_v2_patterns("/user/(?P<user_id>[^/]*)/filter")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(CreateFilterRestServlet, self).__init__()
|
super(CreateFilterRestServlet, self).__init__()
|
||||||
|
|
|
@ -21,7 +21,7 @@ from synapse.types import UserID
|
||||||
|
|
||||||
from canonicaljson import encode_canonical_json
|
from canonicaljson import encode_canonical_json
|
||||||
|
|
||||||
from ._base import client_v2_pattern
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
import simplejson as json
|
import simplejson as json
|
||||||
import logging
|
import logging
|
||||||
|
@ -54,7 +54,7 @@ class KeyUploadServlet(RestServlet):
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
PATTERN = client_v2_pattern("/keys/upload/(?P<device_id>[^/]*)")
|
PATTERNS = client_v2_patterns("/keys/upload/(?P<device_id>[^/]*)", releases=())
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(KeyUploadServlet, self).__init__()
|
super(KeyUploadServlet, self).__init__()
|
||||||
|
@ -154,12 +154,13 @@ class KeyQueryServlet(RestServlet):
|
||||||
} } } } } }
|
} } } } } }
|
||||||
"""
|
"""
|
||||||
|
|
||||||
PATTERN = client_v2_pattern(
|
PATTERNS = client_v2_patterns(
|
||||||
"/keys/query(?:"
|
"/keys/query(?:"
|
||||||
"/(?P<user_id>[^/]*)(?:"
|
"/(?P<user_id>[^/]*)(?:"
|
||||||
"/(?P<device_id>[^/]*)"
|
"/(?P<device_id>[^/]*)"
|
||||||
")?"
|
")?"
|
||||||
")?"
|
")?",
|
||||||
|
releases=()
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
@ -245,10 +246,11 @@ class OneTimeKeyServlet(RestServlet):
|
||||||
} } } }
|
} } } }
|
||||||
|
|
||||||
"""
|
"""
|
||||||
PATTERN = client_v2_pattern(
|
PATTERNS = client_v2_patterns(
|
||||||
"/keys/claim(?:/?|(?:/"
|
"/keys/claim(?:/?|(?:/"
|
||||||
"(?P<user_id>[^/]*)/(?P<device_id>[^/]*)/(?P<algorithm>[^/]*)"
|
"(?P<user_id>[^/]*)/(?P<device_id>[^/]*)/(?P<algorithm>[^/]*)"
|
||||||
")?)"
|
")?)",
|
||||||
|
releases=()
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
|
|
@ -17,7 +17,7 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
from ._base import client_v2_pattern
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -26,7 +26,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ReceiptRestServlet(RestServlet):
|
class ReceiptRestServlet(RestServlet):
|
||||||
PATTERN = client_v2_pattern(
|
PATTERNS = client_v2_patterns(
|
||||||
"/rooms/(?P<room_id>[^/]*)"
|
"/rooms/(?P<room_id>[^/]*)"
|
||||||
"/receipt/(?P<receipt_type>[^/]*)"
|
"/receipt/(?P<receipt_type>[^/]*)"
|
||||||
"/(?P<event_id>[^/]*)$"
|
"/(?P<event_id>[^/]*)$"
|
||||||
|
|
|
@ -19,7 +19,7 @@ from synapse.api.constants import LoginType
|
||||||
from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError
|
from synapse.api.errors import SynapseError, Codes, UnrecognizedRequestError
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
|
|
||||||
from ._base import client_v2_pattern, parse_json_dict_from_request
|
from ._base import client_v2_patterns, parse_json_dict_from_request
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import hmac
|
import hmac
|
||||||
|
@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class RegisterRestServlet(RestServlet):
|
class RegisterRestServlet(RestServlet):
|
||||||
PATTERN = client_v2_pattern("/register")
|
PATTERNS = client_v2_patterns("/register")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(RegisterRestServlet, self).__init__()
|
super(RegisterRestServlet, self).__init__()
|
||||||
|
|
|
@ -25,11 +25,14 @@ from synapse.events.utils import (
|
||||||
serialize_event, format_event_for_client_v2_without_room_id,
|
serialize_event, format_event_for_client_v2_without_room_id,
|
||||||
)
|
)
|
||||||
from synapse.api.filtering import FilterCollection
|
from synapse.api.filtering import FilterCollection
|
||||||
from ._base import client_v2_pattern
|
from synapse.api.errors import SynapseError
|
||||||
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
import ujson as json
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@ -48,7 +51,7 @@ class SyncRestServlet(RestServlet):
|
||||||
"next_batch": // batch token for the next /sync
|
"next_batch": // batch token for the next /sync
|
||||||
"presence": // presence data for the user.
|
"presence": // presence data for the user.
|
||||||
"rooms": {
|
"rooms": {
|
||||||
"joined": { // Joined rooms being updated.
|
"join": { // Joined rooms being updated.
|
||||||
"${room_id}": { // Id of the room being updated
|
"${room_id}": { // Id of the room being updated
|
||||||
"event_map": // Map of EventID -> event JSON.
|
"event_map": // Map of EventID -> event JSON.
|
||||||
"timeline": { // The recent events in the room if gap is "true"
|
"timeline": { // The recent events in the room if gap is "true"
|
||||||
|
@ -63,13 +66,13 @@ class SyncRestServlet(RestServlet):
|
||||||
"ephemeral": {"events": []} // list of event objects
|
"ephemeral": {"events": []} // list of event objects
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"invited": {}, // Invited rooms being updated.
|
"invite": {}, // Invited rooms being updated.
|
||||||
"archived": {} // Archived rooms being updated.
|
"leave": {} // Archived rooms being updated.
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
PATTERN = client_v2_pattern("/sync$")
|
PATTERNS = client_v2_patterns("/sync$")
|
||||||
ALLOWED_PRESENCE = set(["online", "offline"])
|
ALLOWED_PRESENCE = set(["online", "offline"])
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
@ -82,7 +85,9 @@ class SyncRestServlet(RestServlet):
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, request):
|
def on_GET(self, request):
|
||||||
user, token_id, _ = yield self.auth.get_user_by_req(request)
|
user, token_id, is_guest = yield self.auth.get_user_by_req(
|
||||||
|
request, allow_guest=True
|
||||||
|
)
|
||||||
|
|
||||||
timeout = parse_integer(request, "timeout", default=0)
|
timeout = parse_integer(request, "timeout", default=0)
|
||||||
since = parse_string(request, "since")
|
since = parse_string(request, "since")
|
||||||
|
@ -100,15 +105,29 @@ class SyncRestServlet(RestServlet):
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
if filter_id and filter_id.startswith('{'):
|
||||||
filter = yield self.filtering.get_user_filter(
|
try:
|
||||||
user.localpart, filter_id
|
filter_object = json.loads(filter_id)
|
||||||
|
except:
|
||||||
|
raise SynapseError(400, "Invalid filter JSON")
|
||||||
|
self.filtering._check_valid_filter(filter_object)
|
||||||
|
filter = FilterCollection(filter_object)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
filter = yield self.filtering.get_user_filter(
|
||||||
|
user.localpart, filter_id
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
filter = FilterCollection({})
|
||||||
|
|
||||||
|
if is_guest and filter.list_rooms() is None:
|
||||||
|
raise SynapseError(
|
||||||
|
400, "Guest users must provide a list of rooms in the filter"
|
||||||
)
|
)
|
||||||
except:
|
|
||||||
filter = FilterCollection({})
|
|
||||||
|
|
||||||
sync_config = SyncConfig(
|
sync_config = SyncConfig(
|
||||||
user=user,
|
user=user,
|
||||||
|
is_guest=is_guest,
|
||||||
filter=filter,
|
filter=filter,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -144,6 +163,9 @@ class SyncRestServlet(RestServlet):
|
||||||
)
|
)
|
||||||
|
|
||||||
response_content = {
|
response_content = {
|
||||||
|
"account_data": self.encode_account_data(
|
||||||
|
sync_result.account_data, filter, time_now
|
||||||
|
),
|
||||||
"presence": self.encode_presence(
|
"presence": self.encode_presence(
|
||||||
sync_result.presence, filter, time_now
|
sync_result.presence, filter, time_now
|
||||||
),
|
),
|
||||||
|
@ -165,6 +187,9 @@ class SyncRestServlet(RestServlet):
|
||||||
formatted.append(event)
|
formatted.append(event)
|
||||||
return {"events": filter.filter_presence(formatted)}
|
return {"events": filter.filter_presence(formatted)}
|
||||||
|
|
||||||
|
def encode_account_data(self, events, filter, time_now):
|
||||||
|
return {"events": filter.filter_account_data(events)}
|
||||||
|
|
||||||
def encode_joined(self, rooms, filter, time_now, token_id):
|
def encode_joined(self, rooms, filter, time_now, token_id):
|
||||||
"""
|
"""
|
||||||
Encode the joined rooms in a sync result
|
Encode the joined rooms in a sync result
|
||||||
|
@ -333,20 +358,36 @@ class SyncRestServlet(RestServlet):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
prev_event_id = timeline_event.unsigned.get("replaces_state", None)
|
prev_event_id = timeline_event.unsigned.get("replaces_state", None)
|
||||||
logger.debug("Replacing %s with %s in state dict",
|
|
||||||
timeline_event.event_id, prev_event_id)
|
|
||||||
|
|
||||||
if prev_event_id is None:
|
prev_content = timeline_event.unsigned.get('prev_content')
|
||||||
|
prev_sender = timeline_event.unsigned.get('prev_sender')
|
||||||
|
# Empircally it seems possible for the event to have a
|
||||||
|
# "replaces_state" key but not a prev_content or prev_sender
|
||||||
|
# markjh conjectures that it could be due to the server not
|
||||||
|
# having a copy of that event.
|
||||||
|
# If this is the case the we ignore the previous event. This will
|
||||||
|
# cause the displayname calculations on the client to be incorrect
|
||||||
|
if prev_event_id is None or not prev_content or not prev_sender:
|
||||||
|
logger.debug(
|
||||||
|
"Removing %r from the state dict, as it is missing"
|
||||||
|
" prev_content (prev_event_id=%r)",
|
||||||
|
timeline_event.event_id, prev_event_id
|
||||||
|
)
|
||||||
del result[event_key]
|
del result[event_key]
|
||||||
else:
|
else:
|
||||||
|
logger.debug(
|
||||||
|
"Replacing %r with %r in state dict",
|
||||||
|
timeline_event.event_id, prev_event_id
|
||||||
|
)
|
||||||
result[event_key] = FrozenEvent({
|
result[event_key] = FrozenEvent({
|
||||||
"type": timeline_event.type,
|
"type": timeline_event.type,
|
||||||
"state_key": timeline_event.state_key,
|
"state_key": timeline_event.state_key,
|
||||||
"content": timeline_event.unsigned['prev_content'],
|
"content": prev_content,
|
||||||
"sender": timeline_event.unsigned['prev_sender'],
|
"sender": prev_sender,
|
||||||
"event_id": prev_event_id,
|
"event_id": prev_event_id,
|
||||||
"room_id": timeline_event.room_id,
|
"room_id": timeline_event.room_id,
|
||||||
})
|
})
|
||||||
|
|
||||||
logger.debug("New value: %r", result.get(event_key))
|
logger.debug("New value: %r", result.get(event_key))
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
|
@ -13,7 +13,7 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from ._base import client_v2_pattern
|
from ._base import client_v2_patterns
|
||||||
|
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
from synapse.api.errors import AuthError, SynapseError
|
from synapse.api.errors import AuthError, SynapseError
|
||||||
|
@ -31,7 +31,7 @@ class TagListServlet(RestServlet):
|
||||||
"""
|
"""
|
||||||
GET /user/{user_id}/rooms/{room_id}/tags HTTP/1.1
|
GET /user/{user_id}/rooms/{room_id}/tags HTTP/1.1
|
||||||
"""
|
"""
|
||||||
PATTERN = client_v2_pattern(
|
PATTERNS = client_v2_patterns(
|
||||||
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags"
|
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -56,7 +56,7 @@ class TagServlet(RestServlet):
|
||||||
PUT /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
|
PUT /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
|
||||||
DELETE /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
|
DELETE /user/{user_id}/rooms/{room_id}/tags/{tag} HTTP/1.1
|
||||||
"""
|
"""
|
||||||
PATTERN = client_v2_pattern(
|
PATTERNS = client_v2_patterns(
|
||||||
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
|
"/user/(?P<user_id>[^/]*)/rooms/(?P<room_id>[^/]*)/tags/(?P<tag>[^/]*)"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
from synapse.api.errors import AuthError, StoreError, SynapseError
|
from synapse.api.errors import AuthError, StoreError, SynapseError
|
||||||
from synapse.http.servlet import RestServlet
|
from synapse.http.servlet import RestServlet
|
||||||
|
|
||||||
from ._base import client_v2_pattern, parse_json_dict_from_request
|
from ._base import client_v2_patterns, parse_json_dict_from_request
|
||||||
|
|
||||||
|
|
||||||
class TokenRefreshRestServlet(RestServlet):
|
class TokenRefreshRestServlet(RestServlet):
|
||||||
|
@ -26,7 +26,7 @@ class TokenRefreshRestServlet(RestServlet):
|
||||||
Exchanges refresh tokens for a pair of an access token and a new refresh
|
Exchanges refresh tokens for a pair of an access token and a new refresh
|
||||||
token.
|
token.
|
||||||
"""
|
"""
|
||||||
PATTERN = client_v2_pattern("/tokenrefresh")
|
PATTERNS = client_v2_patterns("/tokenrefresh")
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(TokenRefreshRestServlet, self).__init__()
|
super(TokenRefreshRestServlet, self).__init__()
|
||||||
|
|
|
@ -71,8 +71,7 @@ class BaseHomeServer(object):
|
||||||
'state_handler',
|
'state_handler',
|
||||||
'notifier',
|
'notifier',
|
||||||
'distributor',
|
'distributor',
|
||||||
'resource_for_client',
|
'client_resource',
|
||||||
'resource_for_client_v2_alpha',
|
|
||||||
'resource_for_federation',
|
'resource_for_federation',
|
||||||
'resource_for_static_content',
|
'resource_for_static_content',
|
||||||
'resource_for_web_client',
|
'resource_for_web_client',
|
||||||
|
|
|
@ -42,6 +42,7 @@ from .end_to_end_keys import EndToEndKeyStore
|
||||||
from .receipts import ReceiptsStore
|
from .receipts import ReceiptsStore
|
||||||
from .search import SearchStore
|
from .search import SearchStore
|
||||||
from .tags import TagsStore
|
from .tags import TagsStore
|
||||||
|
from .account_data import AccountDataStore
|
||||||
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
@ -73,6 +74,7 @@ class DataStore(RoomMemberStore, RoomStore,
|
||||||
EndToEndKeyStore,
|
EndToEndKeyStore,
|
||||||
SearchStore,
|
SearchStore,
|
||||||
TagsStore,
|
TagsStore,
|
||||||
|
AccountDataStore,
|
||||||
):
|
):
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
|
|
|
@ -214,7 +214,8 @@ class SQLBaseStore(object):
|
||||||
|
|
||||||
self._clock.looping_call(loop, 10000)
|
self._clock.looping_call(loop, 10000)
|
||||||
|
|
||||||
def _new_transaction(self, conn, desc, after_callbacks, func, *args, **kwargs):
|
def _new_transaction(self, conn, desc, after_callbacks, logging_context,
|
||||||
|
func, *args, **kwargs):
|
||||||
start = time.time() * 1000
|
start = time.time() * 1000
|
||||||
txn_id = self._TXN_ID
|
txn_id = self._TXN_ID
|
||||||
|
|
||||||
|
@ -277,6 +278,9 @@ class SQLBaseStore(object):
|
||||||
end = time.time() * 1000
|
end = time.time() * 1000
|
||||||
duration = end - start
|
duration = end - start
|
||||||
|
|
||||||
|
if logging_context is not None:
|
||||||
|
logging_context.add_database_transaction(duration)
|
||||||
|
|
||||||
transaction_logger.debug("[TXN END] {%s} %f", name, duration)
|
transaction_logger.debug("[TXN END] {%s} %f", name, duration)
|
||||||
|
|
||||||
self._current_txn_total_time += duration
|
self._current_txn_total_time += duration
|
||||||
|
@ -302,7 +306,8 @@ class SQLBaseStore(object):
|
||||||
|
|
||||||
current_context.copy_to(context)
|
current_context.copy_to(context)
|
||||||
return self._new_transaction(
|
return self._new_transaction(
|
||||||
conn, desc, after_callbacks, func, *args, **kwargs
|
conn, desc, after_callbacks, current_context,
|
||||||
|
func, *args, **kwargs
|
||||||
)
|
)
|
||||||
|
|
||||||
result = yield preserve_context_over_fn(
|
result = yield preserve_context_over_fn(
|
||||||
|
|
211
synapse/storage/account_data.py
Normal file
211
synapse/storage/account_data.py
Normal file
|
@ -0,0 +1,211 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import SQLBaseStore
|
||||||
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
import ujson as json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AccountDataStore(SQLBaseStore):
|
||||||
|
|
||||||
|
def get_account_data_for_user(self, user_id):
|
||||||
|
"""Get all the client account_data for a user.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user_id(str): The user to get the account_data for.
|
||||||
|
Returns:
|
||||||
|
A deferred pair of a dict of global account_data and a dict
|
||||||
|
mapping from room_id string to per room account_data dicts.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_account_data_for_user_txn(txn):
|
||||||
|
rows = self._simple_select_list_txn(
|
||||||
|
txn, "account_data", {"user_id": user_id},
|
||||||
|
["account_data_type", "content"]
|
||||||
|
)
|
||||||
|
|
||||||
|
global_account_data = {
|
||||||
|
row["account_data_type"]: json.loads(row["content"]) for row in rows
|
||||||
|
}
|
||||||
|
|
||||||
|
rows = self._simple_select_list_txn(
|
||||||
|
txn, "room_account_data", {"user_id": user_id},
|
||||||
|
["room_id", "account_data_type", "content"]
|
||||||
|
)
|
||||||
|
|
||||||
|
by_room = {}
|
||||||
|
for row in rows:
|
||||||
|
room_data = by_room.setdefault(row["room_id"], {})
|
||||||
|
room_data[row["account_data_type"]] = json.loads(row["content"])
|
||||||
|
|
||||||
|
return (global_account_data, by_room)
|
||||||
|
|
||||||
|
return self.runInteraction(
|
||||||
|
"get_account_data_for_user", get_account_data_for_user_txn
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_account_data_for_room(self, user_id, room_id):
|
||||||
|
"""Get all the client account_data for a user for a room.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user_id(str): The user to get the account_data for.
|
||||||
|
room_id(str): The room to get the account_data for.
|
||||||
|
Returns:
|
||||||
|
A deferred dict of the room account_data
|
||||||
|
"""
|
||||||
|
def get_account_data_for_room_txn(txn):
|
||||||
|
rows = self._simple_select_list_txn(
|
||||||
|
txn, "room_account_data", {"user_id": user_id, "room_id": room_id},
|
||||||
|
["account_data_type", "content"]
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
row["account_data_type"]: json.loads(row["content"]) for row in rows
|
||||||
|
}
|
||||||
|
|
||||||
|
return self.runInteraction(
|
||||||
|
"get_account_data_for_room", get_account_data_for_room_txn
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_updated_account_data_for_user(self, user_id, stream_id):
|
||||||
|
"""Get all the client account_data for a that's changed.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user_id(str): The user to get the account_data for.
|
||||||
|
stream_id(int): The point in the stream since which to get updates
|
||||||
|
Returns:
|
||||||
|
A deferred pair of a dict of global account_data and a dict
|
||||||
|
mapping from room_id string to per room account_data dicts.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_updated_account_data_for_user_txn(txn):
|
||||||
|
sql = (
|
||||||
|
"SELECT account_data_type, content FROM account_data"
|
||||||
|
" WHERE user_id = ? AND stream_id > ?"
|
||||||
|
)
|
||||||
|
|
||||||
|
txn.execute(sql, (user_id, stream_id))
|
||||||
|
|
||||||
|
global_account_data = {
|
||||||
|
row[0]: json.loads(row[1]) for row in txn.fetchall()
|
||||||
|
}
|
||||||
|
|
||||||
|
sql = (
|
||||||
|
"SELECT room_id, account_data_type, content FROM room_account_data"
|
||||||
|
" WHERE user_id = ? AND stream_id > ?"
|
||||||
|
)
|
||||||
|
|
||||||
|
txn.execute(sql, (user_id, stream_id))
|
||||||
|
|
||||||
|
account_data_by_room = {}
|
||||||
|
for row in txn.fetchall():
|
||||||
|
room_account_data = account_data_by_room.setdefault(row[0], {})
|
||||||
|
room_account_data[row[1]] = json.loads(row[2])
|
||||||
|
|
||||||
|
return (global_account_data, account_data_by_room)
|
||||||
|
|
||||||
|
return self.runInteraction(
|
||||||
|
"get_updated_account_data_for_user", get_updated_account_data_for_user_txn
|
||||||
|
)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def add_account_data_to_room(self, user_id, room_id, account_data_type, content):
|
||||||
|
"""Add some account_data to a room for a user.
|
||||||
|
Args:
|
||||||
|
user_id(str): The user to add a tag for.
|
||||||
|
room_id(str): The room to add a tag for.
|
||||||
|
account_data_type(str): The type of account_data to add.
|
||||||
|
content(dict): A json object to associate with the tag.
|
||||||
|
Returns:
|
||||||
|
A deferred that completes once the account_data has been added.
|
||||||
|
"""
|
||||||
|
content_json = json.dumps(content)
|
||||||
|
|
||||||
|
def add_account_data_txn(txn, next_id):
|
||||||
|
self._simple_upsert_txn(
|
||||||
|
txn,
|
||||||
|
table="room_account_data",
|
||||||
|
keyvalues={
|
||||||
|
"user_id": user_id,
|
||||||
|
"room_id": room_id,
|
||||||
|
"account_data_type": account_data_type,
|
||||||
|
},
|
||||||
|
values={
|
||||||
|
"stream_id": next_id,
|
||||||
|
"content": content_json,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self._update_max_stream_id(txn, next_id)
|
||||||
|
|
||||||
|
with (yield self._account_data_id_gen.get_next(self)) as next_id:
|
||||||
|
yield self.runInteraction(
|
||||||
|
"add_room_account_data", add_account_data_txn, next_id
|
||||||
|
)
|
||||||
|
|
||||||
|
result = yield self._account_data_id_gen.get_max_token(self)
|
||||||
|
defer.returnValue(result)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def add_account_data_for_user(self, user_id, account_data_type, content):
|
||||||
|
"""Add some account_data to a room for a user.
|
||||||
|
Args:
|
||||||
|
user_id(str): The user to add a tag for.
|
||||||
|
account_data_type(str): The type of account_data to add.
|
||||||
|
content(dict): A json object to associate with the tag.
|
||||||
|
Returns:
|
||||||
|
A deferred that completes once the account_data has been added.
|
||||||
|
"""
|
||||||
|
content_json = json.dumps(content)
|
||||||
|
|
||||||
|
def add_account_data_txn(txn, next_id):
|
||||||
|
self._simple_upsert_txn(
|
||||||
|
txn,
|
||||||
|
table="account_data",
|
||||||
|
keyvalues={
|
||||||
|
"user_id": user_id,
|
||||||
|
"account_data_type": account_data_type,
|
||||||
|
},
|
||||||
|
values={
|
||||||
|
"stream_id": next_id,
|
||||||
|
"content": content_json,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
self._update_max_stream_id(txn, next_id)
|
||||||
|
|
||||||
|
with (yield self._account_data_id_gen.get_next(self)) as next_id:
|
||||||
|
yield self.runInteraction(
|
||||||
|
"add_user_account_data", add_account_data_txn, next_id
|
||||||
|
)
|
||||||
|
|
||||||
|
result = yield self._account_data_id_gen.get_max_token(self)
|
||||||
|
defer.returnValue(result)
|
||||||
|
|
||||||
|
def _update_max_stream_id(self, txn, next_id):
|
||||||
|
"""Update the max stream_id
|
||||||
|
|
||||||
|
Args:
|
||||||
|
txn: The database cursor
|
||||||
|
next_id(int): The the revision to advance to.
|
||||||
|
"""
|
||||||
|
update_max_id_sql = (
|
||||||
|
"UPDATE account_data_max_stream_id"
|
||||||
|
" SET stream_id = ?"
|
||||||
|
" WHERE stream_id < ?"
|
||||||
|
)
|
||||||
|
txn.execute(update_max_id_sql, (next_id, next_id))
|
|
@ -51,6 +51,14 @@ EVENT_QUEUE_TIMEOUT_S = 0.1 # Timeout when waiting for requests for events
|
||||||
|
|
||||||
|
|
||||||
class EventsStore(SQLBaseStore):
|
class EventsStore(SQLBaseStore):
|
||||||
|
EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
|
||||||
|
|
||||||
|
def __init__(self, hs):
|
||||||
|
super(EventsStore, self).__init__(hs)
|
||||||
|
self.register_background_update_handler(
|
||||||
|
self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
|
||||||
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def persist_events(self, events_and_contexts, backfilled=False,
|
def persist_events(self, events_and_contexts, backfilled=False,
|
||||||
is_new_state=True):
|
is_new_state=True):
|
||||||
|
@ -365,6 +373,7 @@ class EventsStore(SQLBaseStore):
|
||||||
"processed": True,
|
"processed": True,
|
||||||
"outlier": event.internal_metadata.is_outlier(),
|
"outlier": event.internal_metadata.is_outlier(),
|
||||||
"content": encode_json(event.content).decode("UTF-8"),
|
"content": encode_json(event.content).decode("UTF-8"),
|
||||||
|
"origin_server_ts": int(event.origin_server_ts),
|
||||||
}
|
}
|
||||||
for event, _ in events_and_contexts
|
for event, _ in events_and_contexts
|
||||||
],
|
],
|
||||||
|
@ -640,7 +649,7 @@ class EventsStore(SQLBaseStore):
|
||||||
]
|
]
|
||||||
|
|
||||||
rows = self._new_transaction(
|
rows = self._new_transaction(
|
||||||
conn, "do_fetch", [], self._fetch_event_rows, event_ids
|
conn, "do_fetch", [], None, self._fetch_event_rows, event_ids
|
||||||
)
|
)
|
||||||
|
|
||||||
row_dict = {
|
row_dict = {
|
||||||
|
@ -964,3 +973,71 @@ class EventsStore(SQLBaseStore):
|
||||||
|
|
||||||
ret = yield self.runInteraction("count_messages", _count_messages)
|
ret = yield self.runInteraction("count_messages", _count_messages)
|
||||||
defer.returnValue(ret)
|
defer.returnValue(ret)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _background_reindex_origin_server_ts(self, progress, batch_size):
|
||||||
|
target_min_stream_id = progress["target_min_stream_id_inclusive"]
|
||||||
|
max_stream_id = progress["max_stream_id_exclusive"]
|
||||||
|
rows_inserted = progress.get("rows_inserted", 0)
|
||||||
|
|
||||||
|
INSERT_CLUMP_SIZE = 1000
|
||||||
|
|
||||||
|
def reindex_search_txn(txn):
|
||||||
|
sql = (
|
||||||
|
"SELECT stream_ordering, event_id FROM events"
|
||||||
|
" WHERE ? <= stream_ordering AND stream_ordering < ?"
|
||||||
|
" ORDER BY stream_ordering DESC"
|
||||||
|
" LIMIT ?"
|
||||||
|
)
|
||||||
|
|
||||||
|
txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
|
||||||
|
|
||||||
|
rows = txn.fetchall()
|
||||||
|
if not rows:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
min_stream_id = rows[-1][0]
|
||||||
|
event_ids = [row[1] for row in rows]
|
||||||
|
|
||||||
|
events = self._get_events_txn(txn, event_ids)
|
||||||
|
|
||||||
|
rows = []
|
||||||
|
for event in events:
|
||||||
|
try:
|
||||||
|
event_id = event.event_id
|
||||||
|
origin_server_ts = event.origin_server_ts
|
||||||
|
except (KeyError, AttributeError):
|
||||||
|
# If the event is missing a necessary field then
|
||||||
|
# skip over it.
|
||||||
|
continue
|
||||||
|
|
||||||
|
rows.append((origin_server_ts, event_id))
|
||||||
|
|
||||||
|
sql = (
|
||||||
|
"UPDATE events SET origin_server_ts = ? WHERE event_id = ?"
|
||||||
|
)
|
||||||
|
|
||||||
|
for index in range(0, len(rows), INSERT_CLUMP_SIZE):
|
||||||
|
clump = rows[index:index + INSERT_CLUMP_SIZE]
|
||||||
|
txn.executemany(sql, clump)
|
||||||
|
|
||||||
|
progress = {
|
||||||
|
"target_min_stream_id_inclusive": target_min_stream_id,
|
||||||
|
"max_stream_id_exclusive": min_stream_id,
|
||||||
|
"rows_inserted": rows_inserted + len(rows)
|
||||||
|
}
|
||||||
|
|
||||||
|
self._background_update_progress_txn(
|
||||||
|
txn, self.EVENT_ORIGIN_SERVER_TS_NAME, progress
|
||||||
|
)
|
||||||
|
|
||||||
|
return len(rows)
|
||||||
|
|
||||||
|
result = yield self.runInteraction(
|
||||||
|
self.EVENT_ORIGIN_SERVER_TS_NAME, reindex_search_txn
|
||||||
|
)
|
||||||
|
|
||||||
|
if not result:
|
||||||
|
yield self._end_background_update(self.EVENT_ORIGIN_SERVER_TS_NAME)
|
||||||
|
|
||||||
|
defer.returnValue(result)
|
||||||
|
|
|
@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Remember to update this number every time a change is made to database
|
# Remember to update this number every time a change is made to database
|
||||||
# schema files, so the users will be informed on server restarts.
|
# schema files, so the users will be informed on server restarts.
|
||||||
SCHEMA_VERSION = 26
|
SCHEMA_VERSION = 27
|
||||||
|
|
||||||
dir_path = os.path.abspath(os.path.dirname(__file__))
|
dir_path = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
|
||||||
|
|
|
@ -258,10 +258,10 @@ class RegistrationStore(SQLBaseStore):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
|
def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
|
||||||
yield self._simple_upsert("user_threepids", {
|
yield self._simple_upsert("user_threepids", {
|
||||||
"user_id": user_id,
|
|
||||||
"medium": medium,
|
"medium": medium,
|
||||||
"address": address,
|
"address": address,
|
||||||
}, {
|
}, {
|
||||||
|
"user_id": user_id,
|
||||||
"validated_at": validated_at,
|
"validated_at": validated_at,
|
||||||
"added_at": added_at,
|
"added_at": added_at,
|
||||||
})
|
})
|
||||||
|
|
|
@ -18,7 +18,7 @@ from twisted.internet import defer
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
|
|
||||||
from ._base import SQLBaseStore
|
from ._base import SQLBaseStore
|
||||||
from synapse.util.caches.descriptors import cached
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
||||||
|
|
||||||
from synapse.api.constants import Membership
|
from synapse.api.constants import Membership
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
|
@ -121,7 +121,7 @@ class RoomMemberStore(SQLBaseStore):
|
||||||
return self.get_rooms_for_user_where_membership_is(
|
return self.get_rooms_for_user_where_membership_is(
|
||||||
user_id, [Membership.INVITE]
|
user_id, [Membership.INVITE]
|
||||||
).addCallback(lambda invites: self._get_events([
|
).addCallback(lambda invites: self._get_events([
|
||||||
invites.event_id for invite in invites
|
invite.event_id for invite in invites
|
||||||
]))
|
]))
|
||||||
|
|
||||||
def get_leave_and_ban_events_for_user(self, user_id):
|
def get_leave_and_ban_events_for_user(self, user_id):
|
||||||
|
@ -160,7 +160,7 @@ class RoomMemberStore(SQLBaseStore):
|
||||||
|
|
||||||
def _get_rooms_for_user_where_membership_is_txn(self, txn, user_id,
|
def _get_rooms_for_user_where_membership_is_txn(self, txn, user_id,
|
||||||
membership_list):
|
membership_list):
|
||||||
where_clause = "user_id = ? AND (%s)" % (
|
where_clause = "user_id = ? AND (%s) AND forgotten = 0" % (
|
||||||
" OR ".join(["membership = ?" for _ in membership_list]),
|
" OR ".join(["membership = ?" for _ in membership_list]),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -269,3 +269,70 @@ class RoomMemberStore(SQLBaseStore):
|
||||||
ret = len(room_id_lists.pop(0).intersection(*room_id_lists)) > 0
|
ret = len(room_id_lists.pop(0).intersection(*room_id_lists)) > 0
|
||||||
|
|
||||||
defer.returnValue(ret)
|
defer.returnValue(ret)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def forget(self, user_id, room_id):
|
||||||
|
"""Indicate that user_id wishes to discard history for room_id."""
|
||||||
|
def f(txn):
|
||||||
|
sql = (
|
||||||
|
"UPDATE"
|
||||||
|
" room_memberships"
|
||||||
|
" SET"
|
||||||
|
" forgotten = 1"
|
||||||
|
" WHERE"
|
||||||
|
" user_id = ?"
|
||||||
|
" AND"
|
||||||
|
" room_id = ?"
|
||||||
|
)
|
||||||
|
txn.execute(sql, (user_id, room_id))
|
||||||
|
yield self.runInteraction("forget_membership", f)
|
||||||
|
self.was_forgotten_at.invalidate_all()
|
||||||
|
self.did_forget.invalidate((user_id, room_id))
|
||||||
|
|
||||||
|
@cachedInlineCallbacks(num_args=2)
|
||||||
|
def did_forget(self, user_id, room_id):
|
||||||
|
"""Returns whether user_id has elected to discard history for room_id.
|
||||||
|
|
||||||
|
Returns False if they have since re-joined."""
|
||||||
|
def f(txn):
|
||||||
|
sql = (
|
||||||
|
"SELECT"
|
||||||
|
" COUNT(*)"
|
||||||
|
" FROM"
|
||||||
|
" room_memberships"
|
||||||
|
" WHERE"
|
||||||
|
" user_id = ?"
|
||||||
|
" AND"
|
||||||
|
" room_id = ?"
|
||||||
|
" AND"
|
||||||
|
" forgotten = 0"
|
||||||
|
)
|
||||||
|
txn.execute(sql, (user_id, room_id))
|
||||||
|
rows = txn.fetchall()
|
||||||
|
return rows[0][0]
|
||||||
|
count = yield self.runInteraction("did_forget_membership", f)
|
||||||
|
defer.returnValue(count == 0)
|
||||||
|
|
||||||
|
@cachedInlineCallbacks(num_args=3)
|
||||||
|
def was_forgotten_at(self, user_id, room_id, event_id):
|
||||||
|
"""Returns whether user_id has elected to discard history for room_id at event_id.
|
||||||
|
|
||||||
|
event_id must be a membership event."""
|
||||||
|
def f(txn):
|
||||||
|
sql = (
|
||||||
|
"SELECT"
|
||||||
|
" forgotten"
|
||||||
|
" FROM"
|
||||||
|
" room_memberships"
|
||||||
|
" WHERE"
|
||||||
|
" user_id = ?"
|
||||||
|
" AND"
|
||||||
|
" room_id = ?"
|
||||||
|
" AND"
|
||||||
|
" event_id = ?"
|
||||||
|
)
|
||||||
|
txn.execute(sql, (user_id, room_id, event_id))
|
||||||
|
rows = txn.fetchall()
|
||||||
|
return rows[0][0]
|
||||||
|
forgot = yield self.runInteraction("did_forget_membership_at", f)
|
||||||
|
defer.returnValue(forgot == 1)
|
||||||
|
|
|
@ -1,23 +1,22 @@
|
||||||
-- Drop, copy & recreate pushers table to change unique key
|
-- Drop, copy & recreate pushers table to change unique key
|
||||||
-- Also add access_token column at the same time
|
-- Also add access_token column at the same time
|
||||||
CREATE TABLE IF NOT EXISTS pushers2 (
|
CREATE TABLE IF NOT EXISTS pushers2 (
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
id BIGINT PRIMARY KEY,
|
||||||
user_name TEXT NOT NULL,
|
user_name TEXT NOT NULL,
|
||||||
access_token INTEGER DEFAULT NULL,
|
access_token BIGINT DEFAULT NULL,
|
||||||
profile_tag varchar(32) NOT NULL,
|
profile_tag VARCHAR(32) NOT NULL,
|
||||||
kind varchar(8) NOT NULL,
|
kind VARCHAR(8) NOT NULL,
|
||||||
app_id varchar(64) NOT NULL,
|
app_id VARCHAR(64) NOT NULL,
|
||||||
app_display_name varchar(64) NOT NULL,
|
app_display_name VARCHAR(64) NOT NULL,
|
||||||
device_display_name varchar(128) NOT NULL,
|
device_display_name VARCHAR(128) NOT NULL,
|
||||||
pushkey blob NOT NULL,
|
pushkey bytea NOT NULL,
|
||||||
ts BIGINT NOT NULL,
|
ts BIGINT NOT NULL,
|
||||||
lang varchar(8),
|
lang VARCHAR(8),
|
||||||
data blob,
|
data bytea,
|
||||||
last_token TEXT,
|
last_token TEXT,
|
||||||
last_success BIGINT,
|
last_success BIGINT,
|
||||||
failing_since BIGINT,
|
failing_since BIGINT,
|
||||||
FOREIGN KEY(user_name) REFERENCES users(name),
|
UNIQUE (app_id, pushkey)
|
||||||
UNIQUE (app_id, pushkey, user_name)
|
|
||||||
);
|
);
|
||||||
INSERT INTO pushers2 (id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since)
|
INSERT INTO pushers2 (id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since)
|
||||||
SELECT id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since FROM pushers;
|
SELECT id, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, ts, lang, data, last_token, last_success, failing_since FROM pushers;
|
||||||
|
|
|
@ -38,7 +38,7 @@ CREATE INDEX event_search_ev_ridx ON event_search(room_id);
|
||||||
|
|
||||||
|
|
||||||
SQLITE_TABLE = (
|
SQLITE_TABLE = (
|
||||||
"CREATE VIRTUAL TABLE IF NOT EXISTS event_search"
|
"CREATE VIRTUAL TABLE event_search"
|
||||||
" USING fts4 ( event_id, room_id, sender, key, value )"
|
" USING fts4 ( event_id, room_id, sender, key, value )"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
36
synapse/storage/schema/delta/27/account_data.sql
Normal file
36
synapse/storage/schema/delta/27/account_data.sql
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
/* Copyright 2015 OpenMarket Ltd
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS account_data(
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
account_data_type TEXT NOT NULL, -- The type of the account_data.
|
||||||
|
stream_id BIGINT NOT NULL, -- The version of the account_data.
|
||||||
|
content TEXT NOT NULL, -- The JSON content of the account_data
|
||||||
|
CONSTRAINT account_data_uniqueness UNIQUE (user_id, account_data_type)
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS room_account_data(
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
room_id TEXT NOT NULL,
|
||||||
|
account_data_type TEXT NOT NULL, -- The type of the account_data.
|
||||||
|
stream_id BIGINT NOT NULL, -- The version of the account_data.
|
||||||
|
content TEXT NOT NULL, -- The JSON content of the account_data
|
||||||
|
CONSTRAINT room_account_data_uniqueness UNIQUE (user_id, room_id, account_data_type)
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
CREATE INDEX account_data_stream_id on account_data(user_id, stream_id);
|
||||||
|
CREATE INDEX room_account_data_stream_id on room_account_data(user_id, stream_id);
|
26
synapse/storage/schema/delta/27/forgotten_memberships.sql
Normal file
26
synapse/storage/schema/delta/27/forgotten_memberships.sql
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
/* Copyright 2015 OpenMarket Ltd
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Keeps track of what rooms users have left and don't want to be able to
|
||||||
|
* access again.
|
||||||
|
*
|
||||||
|
* If all users on this server have left a room, we can delete the room
|
||||||
|
* entirely.
|
||||||
|
*
|
||||||
|
* This column should always contain either 0 or 1.
|
||||||
|
*/
|
||||||
|
|
||||||
|
ALTER TABLE room_memberships ADD COLUMN forgotten INTEGER DEFAULT 0;
|
57
synapse/storage/schema/delta/27/ts.py
Normal file
57
synapse/storage/schema/delta/27/ts.py
Normal file
|
@ -0,0 +1,57 @@
|
||||||
|
# Copyright 2015 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from synapse.storage.prepare_database import get_statements
|
||||||
|
|
||||||
|
import ujson
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
ALTER_TABLE = (
|
||||||
|
"ALTER TABLE events ADD COLUMN origin_server_ts BIGINT;"
|
||||||
|
"CREATE INDEX events_ts ON events(origin_server_ts, stream_ordering);"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def run_upgrade(cur, database_engine, *args, **kwargs):
|
||||||
|
for statement in get_statements(ALTER_TABLE.splitlines()):
|
||||||
|
cur.execute(statement)
|
||||||
|
|
||||||
|
cur.execute("SELECT MIN(stream_ordering) FROM events")
|
||||||
|
rows = cur.fetchall()
|
||||||
|
min_stream_id = rows[0][0]
|
||||||
|
|
||||||
|
cur.execute("SELECT MAX(stream_ordering) FROM events")
|
||||||
|
rows = cur.fetchall()
|
||||||
|
max_stream_id = rows[0][0]
|
||||||
|
|
||||||
|
if min_stream_id is not None and max_stream_id is not None:
|
||||||
|
progress = {
|
||||||
|
"target_min_stream_id_inclusive": min_stream_id,
|
||||||
|
"max_stream_id_exclusive": max_stream_id + 1,
|
||||||
|
"rows_inserted": 0,
|
||||||
|
}
|
||||||
|
progress_json = ujson.dumps(progress)
|
||||||
|
|
||||||
|
sql = (
|
||||||
|
"INSERT into background_updates (update_name, progress_json)"
|
||||||
|
" VALUES (?, ?)"
|
||||||
|
)
|
||||||
|
|
||||||
|
sql = database_engine.convert_param_style(sql)
|
||||||
|
|
||||||
|
cur.execute(sql, ("event_origin_server_ts", progress_json))
|
|
@ -20,6 +20,7 @@ from synapse.api.errors import SynapseError
|
||||||
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
|
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -84,6 +85,11 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
# skip over it.
|
# skip over it.
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
if not isinstance(value, basestring):
|
||||||
|
# If the event body, name or topic isn't a string
|
||||||
|
# then skip over it
|
||||||
|
continue
|
||||||
|
|
||||||
event_search_rows.append((event_id, room_id, key, value))
|
event_search_rows.append((event_id, room_id, key, value))
|
||||||
|
|
||||||
if isinstance(self.database_engine, PostgresEngine):
|
if isinstance(self.database_engine, PostgresEngine):
|
||||||
|
@ -139,6 +145,9 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
list of dicts
|
list of dicts
|
||||||
"""
|
"""
|
||||||
clauses = []
|
clauses = []
|
||||||
|
|
||||||
|
search_query = search_query = _parse_query(self.database_engine, search_term)
|
||||||
|
|
||||||
args = []
|
args = []
|
||||||
|
|
||||||
# Make sure we don't explode because the person is in too many rooms.
|
# Make sure we don't explode because the person is in too many rooms.
|
||||||
|
@ -158,18 +167,36 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
"(%s)" % (" OR ".join(local_clauses),)
|
"(%s)" % (" OR ".join(local_clauses),)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
count_args = args
|
||||||
|
count_clauses = clauses
|
||||||
|
|
||||||
if isinstance(self.database_engine, PostgresEngine):
|
if isinstance(self.database_engine, PostgresEngine):
|
||||||
sql = (
|
sql = (
|
||||||
"SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
|
"SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
|
||||||
" FROM plainto_tsquery('english', ?) as query, event_search"
|
" room_id, event_id"
|
||||||
" WHERE vector @@ query"
|
" FROM event_search"
|
||||||
|
" WHERE vector @@ to_tsquery('english', ?)"
|
||||||
)
|
)
|
||||||
|
args = [search_query, search_query] + args
|
||||||
|
|
||||||
|
count_sql = (
|
||||||
|
"SELECT room_id, count(*) as count FROM event_search"
|
||||||
|
" WHERE vector @@ to_tsquery('english', ?)"
|
||||||
|
)
|
||||||
|
count_args = [search_query] + count_args
|
||||||
elif isinstance(self.database_engine, Sqlite3Engine):
|
elif isinstance(self.database_engine, Sqlite3Engine):
|
||||||
sql = (
|
sql = (
|
||||||
"SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
|
"SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
|
||||||
" FROM event_search"
|
" FROM event_search"
|
||||||
" WHERE value MATCH ?"
|
" WHERE value MATCH ?"
|
||||||
)
|
)
|
||||||
|
args = [search_query] + args
|
||||||
|
|
||||||
|
count_sql = (
|
||||||
|
"SELECT room_id, count(*) as count FROM event_search"
|
||||||
|
" WHERE value MATCH ?"
|
||||||
|
)
|
||||||
|
count_args = [search_term] + count_args
|
||||||
else:
|
else:
|
||||||
# This should be unreachable.
|
# This should be unreachable.
|
||||||
raise Exception("Unrecognized database engine")
|
raise Exception("Unrecognized database engine")
|
||||||
|
@ -177,12 +204,15 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
for clause in clauses:
|
for clause in clauses:
|
||||||
sql += " AND " + clause
|
sql += " AND " + clause
|
||||||
|
|
||||||
|
for clause in count_clauses:
|
||||||
|
count_sql += " AND " + clause
|
||||||
|
|
||||||
# We add an arbitrary limit here to ensure we don't try to pull the
|
# We add an arbitrary limit here to ensure we don't try to pull the
|
||||||
# entire table from the database.
|
# entire table from the database.
|
||||||
sql += " ORDER BY rank DESC LIMIT 500"
|
sql += " ORDER BY rank DESC LIMIT 500"
|
||||||
|
|
||||||
results = yield self._execute(
|
results = yield self._execute(
|
||||||
"search_msgs", self.cursor_to_dict, sql, *([search_term] + args)
|
"search_msgs", self.cursor_to_dict, sql, *args
|
||||||
)
|
)
|
||||||
|
|
||||||
results = filter(lambda row: row["room_id"] in room_ids, results)
|
results = filter(lambda row: row["room_id"] in room_ids, results)
|
||||||
|
@ -194,21 +224,37 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
for ev in events
|
for ev in events
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue([
|
highlights = None
|
||||||
{
|
if isinstance(self.database_engine, PostgresEngine):
|
||||||
"event": event_map[r["event_id"]],
|
highlights = yield self._find_highlights_in_postgres(search_query, events)
|
||||||
"rank": r["rank"],
|
|
||||||
}
|
count_sql += " GROUP BY room_id"
|
||||||
for r in results
|
|
||||||
if r["event_id"] in event_map
|
count_results = yield self._execute(
|
||||||
])
|
"search_rooms_count", self.cursor_to_dict, count_sql, *count_args
|
||||||
|
)
|
||||||
|
|
||||||
|
count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
|
||||||
|
|
||||||
|
defer.returnValue({
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"event": event_map[r["event_id"]],
|
||||||
|
"rank": r["rank"],
|
||||||
|
}
|
||||||
|
for r in results
|
||||||
|
if r["event_id"] in event_map
|
||||||
|
],
|
||||||
|
"highlights": highlights,
|
||||||
|
"count": count,
|
||||||
|
})
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def search_room(self, room_id, search_term, keys, limit, pagination_token=None):
|
def search_rooms(self, room_ids, search_term, keys, limit, pagination_token=None):
|
||||||
"""Performs a full text search over events with given keys.
|
"""Performs a full text search over events with given keys.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
room_id (str): The room_id to search in
|
room_id (list): The room_ids to search in
|
||||||
search_term (str): Search term to search for
|
search_term (str): Search term to search for
|
||||||
keys (list): List of keys to search in, currently supports
|
keys (list): List of keys to search in, currently supports
|
||||||
"content.body", "content.name", "content.topic"
|
"content.body", "content.name", "content.topic"
|
||||||
|
@ -218,7 +264,18 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
list of dicts
|
list of dicts
|
||||||
"""
|
"""
|
||||||
clauses = []
|
clauses = []
|
||||||
args = [search_term, room_id]
|
|
||||||
|
search_query = search_query = _parse_query(self.database_engine, search_term)
|
||||||
|
|
||||||
|
args = []
|
||||||
|
|
||||||
|
# Make sure we don't explode because the person is in too many rooms.
|
||||||
|
# We filter the results below regardless.
|
||||||
|
if len(room_ids) < 500:
|
||||||
|
clauses.append(
|
||||||
|
"room_id IN (%s)" % (",".join(["?"] * len(room_ids)),)
|
||||||
|
)
|
||||||
|
args.extend(room_ids)
|
||||||
|
|
||||||
local_clauses = []
|
local_clauses = []
|
||||||
for key in keys:
|
for key in keys:
|
||||||
|
@ -229,28 +286,40 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
"(%s)" % (" OR ".join(local_clauses),)
|
"(%s)" % (" OR ".join(local_clauses),)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# take copies of the current args and clauses lists, before adding
|
||||||
|
# pagination clauses to main query.
|
||||||
|
count_args = list(args)
|
||||||
|
count_clauses = list(clauses)
|
||||||
|
|
||||||
if pagination_token:
|
if pagination_token:
|
||||||
try:
|
try:
|
||||||
topo, stream = pagination_token.split(",")
|
origin_server_ts, stream = pagination_token.split(",")
|
||||||
topo = int(topo)
|
origin_server_ts = int(origin_server_ts)
|
||||||
stream = int(stream)
|
stream = int(stream)
|
||||||
except:
|
except:
|
||||||
raise SynapseError(400, "Invalid pagination token")
|
raise SynapseError(400, "Invalid pagination token")
|
||||||
|
|
||||||
clauses.append(
|
clauses.append(
|
||||||
"(topological_ordering < ?"
|
"(origin_server_ts < ?"
|
||||||
" OR (topological_ordering = ? AND stream_ordering < ?))"
|
" OR (origin_server_ts = ? AND stream_ordering < ?))"
|
||||||
)
|
)
|
||||||
args.extend([topo, topo, stream])
|
args.extend([origin_server_ts, origin_server_ts, stream])
|
||||||
|
|
||||||
if isinstance(self.database_engine, PostgresEngine):
|
if isinstance(self.database_engine, PostgresEngine):
|
||||||
sql = (
|
sql = (
|
||||||
"SELECT ts_rank_cd(vector, query) as rank,"
|
"SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
|
||||||
" topological_ordering, stream_ordering, room_id, event_id"
|
" origin_server_ts, stream_ordering, room_id, event_id"
|
||||||
" FROM plainto_tsquery('english', ?) as query, event_search"
|
" FROM event_search"
|
||||||
" NATURAL JOIN events"
|
" NATURAL JOIN events"
|
||||||
" WHERE vector @@ query AND room_id = ?"
|
" WHERE vector @@ to_tsquery('english', ?) AND "
|
||||||
)
|
)
|
||||||
|
args = [search_query, search_query] + args
|
||||||
|
|
||||||
|
count_sql = (
|
||||||
|
"SELECT room_id, count(*) as count FROM event_search"
|
||||||
|
" WHERE vector @@ to_tsquery('english', ?) AND "
|
||||||
|
)
|
||||||
|
count_args = [search_query] + count_args
|
||||||
elif isinstance(self.database_engine, Sqlite3Engine):
|
elif isinstance(self.database_engine, Sqlite3Engine):
|
||||||
# We use CROSS JOIN here to ensure we use the right indexes.
|
# We use CROSS JOIN here to ensure we use the right indexes.
|
||||||
# https://sqlite.org/optoverview.html#crossjoin
|
# https://sqlite.org/optoverview.html#crossjoin
|
||||||
|
@ -262,24 +331,31 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
# MATCH unless it uses the full text search index
|
# MATCH unless it uses the full text search index
|
||||||
sql = (
|
sql = (
|
||||||
"SELECT rank(matchinfo) as rank, room_id, event_id,"
|
"SELECT rank(matchinfo) as rank, room_id, event_id,"
|
||||||
" topological_ordering, stream_ordering"
|
" origin_server_ts, stream_ordering"
|
||||||
" FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
|
" FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
|
||||||
" FROM event_search"
|
" FROM event_search"
|
||||||
" WHERE value MATCH ?"
|
" WHERE value MATCH ?"
|
||||||
" )"
|
" )"
|
||||||
" CROSS JOIN events USING (event_id)"
|
" CROSS JOIN events USING (event_id)"
|
||||||
" WHERE room_id = ?"
|
" WHERE "
|
||||||
)
|
)
|
||||||
|
args = [search_query] + args
|
||||||
|
|
||||||
|
count_sql = (
|
||||||
|
"SELECT room_id, count(*) as count FROM event_search"
|
||||||
|
" WHERE value MATCH ? AND "
|
||||||
|
)
|
||||||
|
count_args = [search_term] + count_args
|
||||||
else:
|
else:
|
||||||
# This should be unreachable.
|
# This should be unreachable.
|
||||||
raise Exception("Unrecognized database engine")
|
raise Exception("Unrecognized database engine")
|
||||||
|
|
||||||
for clause in clauses:
|
sql += " AND ".join(clauses)
|
||||||
sql += " AND " + clause
|
count_sql += " AND ".join(count_clauses)
|
||||||
|
|
||||||
# We add an arbitrary limit here to ensure we don't try to pull the
|
# We add an arbitrary limit here to ensure we don't try to pull the
|
||||||
# entire table from the database.
|
# entire table from the database.
|
||||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
|
sql += " ORDER BY origin_server_ts DESC, stream_ordering DESC LIMIT ?"
|
||||||
|
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
|
|
||||||
|
@ -287,6 +363,8 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
"search_rooms", self.cursor_to_dict, sql, *args
|
"search_rooms", self.cursor_to_dict, sql, *args
|
||||||
)
|
)
|
||||||
|
|
||||||
|
results = filter(lambda row: row["room_id"] in room_ids, results)
|
||||||
|
|
||||||
events = yield self._get_events([r["event_id"] for r in results])
|
events = yield self._get_events([r["event_id"] for r in results])
|
||||||
|
|
||||||
event_map = {
|
event_map = {
|
||||||
|
@ -294,14 +372,119 @@ class SearchStore(BackgroundUpdateStore):
|
||||||
for ev in events
|
for ev in events
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue([
|
highlights = None
|
||||||
{
|
if isinstance(self.database_engine, PostgresEngine):
|
||||||
"event": event_map[r["event_id"]],
|
highlights = yield self._find_highlights_in_postgres(search_query, events)
|
||||||
"rank": r["rank"],
|
|
||||||
"pagination_token": "%s,%s" % (
|
count_sql += " GROUP BY room_id"
|
||||||
r["topological_ordering"], r["stream_ordering"]
|
|
||||||
),
|
count_results = yield self._execute(
|
||||||
}
|
"search_rooms_count", self.cursor_to_dict, count_sql, *count_args
|
||||||
for r in results
|
)
|
||||||
if r["event_id"] in event_map
|
|
||||||
])
|
count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
|
||||||
|
|
||||||
|
defer.returnValue({
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"event": event_map[r["event_id"]],
|
||||||
|
"rank": r["rank"],
|
||||||
|
"pagination_token": "%s,%s" % (
|
||||||
|
r["origin_server_ts"], r["stream_ordering"]
|
||||||
|
),
|
||||||
|
}
|
||||||
|
for r in results
|
||||||
|
if r["event_id"] in event_map
|
||||||
|
],
|
||||||
|
"highlights": highlights,
|
||||||
|
"count": count,
|
||||||
|
})
|
||||||
|
|
||||||
|
def _find_highlights_in_postgres(self, search_query, events):
|
||||||
|
"""Given a list of events and a search term, return a list of words
|
||||||
|
that match from the content of the event.
|
||||||
|
|
||||||
|
This is used to give a list of words that clients can match against to
|
||||||
|
highlight the matching parts.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
search_query (str)
|
||||||
|
events (list): A list of events
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
deferred : A set of strings.
|
||||||
|
"""
|
||||||
|
def f(txn):
|
||||||
|
highlight_words = set()
|
||||||
|
for event in events:
|
||||||
|
# As a hack we simply join values of all possible keys. This is
|
||||||
|
# fine since we're only using them to find possible highlights.
|
||||||
|
values = []
|
||||||
|
for key in ("body", "name", "topic"):
|
||||||
|
v = event.content.get(key, None)
|
||||||
|
if v:
|
||||||
|
values.append(v)
|
||||||
|
|
||||||
|
if not values:
|
||||||
|
continue
|
||||||
|
|
||||||
|
value = " ".join(values)
|
||||||
|
|
||||||
|
# We need to find some values for StartSel and StopSel that
|
||||||
|
# aren't in the value so that we can pick results out.
|
||||||
|
start_sel = "<"
|
||||||
|
stop_sel = ">"
|
||||||
|
|
||||||
|
while start_sel in value:
|
||||||
|
start_sel += "<"
|
||||||
|
while stop_sel in value:
|
||||||
|
stop_sel += ">"
|
||||||
|
|
||||||
|
query = "SELECT ts_headline(?, to_tsquery('english', ?), %s)" % (
|
||||||
|
_to_postgres_options({
|
||||||
|
"StartSel": start_sel,
|
||||||
|
"StopSel": stop_sel,
|
||||||
|
"MaxFragments": "50",
|
||||||
|
})
|
||||||
|
)
|
||||||
|
txn.execute(query, (value, search_query,))
|
||||||
|
headline, = txn.fetchall()[0]
|
||||||
|
|
||||||
|
# Now we need to pick the possible highlights out of the haedline
|
||||||
|
# result.
|
||||||
|
matcher_regex = "%s(.*?)%s" % (
|
||||||
|
re.escape(start_sel),
|
||||||
|
re.escape(stop_sel),
|
||||||
|
)
|
||||||
|
|
||||||
|
res = re.findall(matcher_regex, headline)
|
||||||
|
highlight_words.update([r.lower() for r in res])
|
||||||
|
|
||||||
|
return highlight_words
|
||||||
|
|
||||||
|
return self.runInteraction("_find_highlights", f)
|
||||||
|
|
||||||
|
|
||||||
|
def _to_postgres_options(options_dict):
|
||||||
|
return "'%s'" % (
|
||||||
|
",".join("%s=%s" % (k, v) for k, v in options_dict.items()),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_query(database_engine, search_term):
|
||||||
|
"""Takes a plain unicode string from the user and converts it into a form
|
||||||
|
that can be passed to database.
|
||||||
|
We use this so that we can add prefix matching, which isn't something
|
||||||
|
that is supported by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Pull out the individual words, discarding any non-word characters.
|
||||||
|
results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
|
||||||
|
|
||||||
|
if isinstance(database_engine, PostgresEngine):
|
||||||
|
return " & ".join(result + ":*" for result in results)
|
||||||
|
elif isinstance(database_engine, Sqlite3Engine):
|
||||||
|
return " & ".join(result + "*" for result in results)
|
||||||
|
else:
|
||||||
|
# This should be unreachable.
|
||||||
|
raise Exception("Unrecognized database engine")
|
||||||
|
|
|
@ -48,8 +48,8 @@ class TagsStore(SQLBaseStore):
|
||||||
Args:
|
Args:
|
||||||
user_id(str): The user to get the tags for.
|
user_id(str): The user to get the tags for.
|
||||||
Returns:
|
Returns:
|
||||||
A deferred dict mapping from room_id strings to lists of tag
|
A deferred dict mapping from room_id strings to dicts mapping from
|
||||||
strings.
|
tag strings to tag content.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
deferred = self._simple_select_list(
|
deferred = self._simple_select_list(
|
||||||
|
|
|
@ -64,8 +64,7 @@ class Clock(object):
|
||||||
current_context = LoggingContext.current_context()
|
current_context = LoggingContext.current_context()
|
||||||
|
|
||||||
def wrapped_callback(*args, **kwargs):
|
def wrapped_callback(*args, **kwargs):
|
||||||
with PreserveLoggingContext():
|
with PreserveLoggingContext(current_context):
|
||||||
LoggingContext.thread_local.current_context = current_context
|
|
||||||
callback(*args, **kwargs)
|
callback(*args, **kwargs)
|
||||||
|
|
||||||
with PreserveLoggingContext():
|
with PreserveLoggingContext():
|
||||||
|
|
93
synapse/util/caches/snapshot_cache.py
Normal file
93
synapse/util/caches/snapshot_cache.py
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from synapse.util.async import ObservableDeferred
|
||||||
|
|
||||||
|
|
||||||
|
class SnapshotCache(object):
|
||||||
|
"""Cache for snapshots like the response of /initialSync.
|
||||||
|
The response of initialSync only has to be a recent snapshot of the
|
||||||
|
server state. It shouldn't matter to clients if it is a few minutes out
|
||||||
|
of date.
|
||||||
|
|
||||||
|
This caches a deferred response. Until the deferred completes it will be
|
||||||
|
returned from the cache. This means that if the client retries the request
|
||||||
|
while the response is still being computed, that original response will be
|
||||||
|
used rather than trying to compute a new response.
|
||||||
|
|
||||||
|
Once the deferred completes it will removed from the cache after 5 minutes.
|
||||||
|
We delay removing it from the cache because a client retrying its request
|
||||||
|
could race with us finishing computing the response.
|
||||||
|
|
||||||
|
Rather than tracking precisely how long something has been in the cache we
|
||||||
|
keep two generations of completed responses. Every 5 minutes discard the
|
||||||
|
old generation, move the new generation to the old generation, and set the
|
||||||
|
new generation to be empty. This means that a result will be in the cache
|
||||||
|
somewhere between 5 and 10 minutes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
DURATION_MS = 5 * 60 * 1000 # Cache results for 5 minutes.
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.pending_result_cache = {} # Request that haven't finished yet.
|
||||||
|
self.prev_result_cache = {} # The older requests that have finished.
|
||||||
|
self.next_result_cache = {} # The newer requests that have finished.
|
||||||
|
self.time_last_rotated_ms = 0
|
||||||
|
|
||||||
|
def rotate(self, time_now_ms):
|
||||||
|
# Rotate once if the cache duration has passed since the last rotation.
|
||||||
|
if time_now_ms - self.time_last_rotated_ms >= self.DURATION_MS:
|
||||||
|
self.prev_result_cache = self.next_result_cache
|
||||||
|
self.next_result_cache = {}
|
||||||
|
self.time_last_rotated_ms += self.DURATION_MS
|
||||||
|
|
||||||
|
# Rotate again if the cache duration has passed twice since the last
|
||||||
|
# rotation.
|
||||||
|
if time_now_ms - self.time_last_rotated_ms >= self.DURATION_MS:
|
||||||
|
self.prev_result_cache = self.next_result_cache
|
||||||
|
self.next_result_cache = {}
|
||||||
|
self.time_last_rotated_ms = time_now_ms
|
||||||
|
|
||||||
|
def get(self, time_now_ms, key):
|
||||||
|
self.rotate(time_now_ms)
|
||||||
|
# This cache is intended to deduplicate requests, so we expect it to be
|
||||||
|
# missed most of the time. So we just lookup the key in all of the
|
||||||
|
# dictionaries rather than trying to short circuit the lookup if the
|
||||||
|
# key is found.
|
||||||
|
result = self.prev_result_cache.get(key)
|
||||||
|
result = self.next_result_cache.get(key, result)
|
||||||
|
result = self.pending_result_cache.get(key, result)
|
||||||
|
if result is not None:
|
||||||
|
return result.observe()
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def set(self, time_now_ms, key, deferred):
|
||||||
|
self.rotate(time_now_ms)
|
||||||
|
|
||||||
|
result = ObservableDeferred(deferred)
|
||||||
|
|
||||||
|
self.pending_result_cache[key] = result
|
||||||
|
|
||||||
|
def shuffle_along(r):
|
||||||
|
# When the deferred completes we shuffle it along to the first
|
||||||
|
# generation of the result cache. So that it will eventually
|
||||||
|
# expire from the rotation of that cache.
|
||||||
|
self.next_result_cache[key] = result
|
||||||
|
self.pending_result_cache.pop(key, None)
|
||||||
|
|
||||||
|
result.observe().addBoth(shuffle_along)
|
||||||
|
|
||||||
|
return result.observe()
|
|
@ -30,8 +30,7 @@ def debug_deferreds():
|
||||||
context = LoggingContext.current_context()
|
context = LoggingContext.current_context()
|
||||||
|
|
||||||
def restore_context_callback(x):
|
def restore_context_callback(x):
|
||||||
with PreserveLoggingContext():
|
with PreserveLoggingContext(context):
|
||||||
LoggingContext.thread_local.current_context = context
|
|
||||||
return fn(x)
|
return fn(x)
|
||||||
|
|
||||||
return restore_context_callback
|
return restore_context_callback
|
||||||
|
|
|
@ -19,6 +19,25 @@ import logging
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
try:
|
||||||
|
import resource
|
||||||
|
|
||||||
|
# Python doesn't ship with a definition of RUSAGE_THREAD but it's defined
|
||||||
|
# to be 1 on linux so we hard code it.
|
||||||
|
RUSAGE_THREAD = 1
|
||||||
|
|
||||||
|
# If the system doesn't support RUSAGE_THREAD then this should throw an
|
||||||
|
# exception.
|
||||||
|
resource.getrusage(RUSAGE_THREAD)
|
||||||
|
|
||||||
|
def get_thread_resource_usage():
|
||||||
|
return resource.getrusage(RUSAGE_THREAD)
|
||||||
|
except:
|
||||||
|
# If the system doesn't support resource.getrusage(RUSAGE_THREAD) then we
|
||||||
|
# won't track resource usage by returning None.
|
||||||
|
def get_thread_resource_usage():
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
class LoggingContext(object):
|
class LoggingContext(object):
|
||||||
"""Additional context for log formatting. Contexts are scoped within a
|
"""Additional context for log formatting. Contexts are scoped within a
|
||||||
|
@ -27,7 +46,9 @@ class LoggingContext(object):
|
||||||
name (str): Name for the context for debugging.
|
name (str): Name for the context for debugging.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__slots__ = ["parent_context", "name", "__dict__"]
|
__slots__ = [
|
||||||
|
"parent_context", "name", "usage_start", "usage_end", "main_thread", "__dict__"
|
||||||
|
]
|
||||||
|
|
||||||
thread_local = threading.local()
|
thread_local = threading.local()
|
||||||
|
|
||||||
|
@ -42,11 +63,26 @@ class LoggingContext(object):
|
||||||
def copy_to(self, record):
|
def copy_to(self, record):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def start(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def add_database_transaction(self, duration_ms):
|
||||||
|
pass
|
||||||
|
|
||||||
sentinel = Sentinel()
|
sentinel = Sentinel()
|
||||||
|
|
||||||
def __init__(self, name=None):
|
def __init__(self, name=None):
|
||||||
self.parent_context = None
|
self.parent_context = None
|
||||||
self.name = name
|
self.name = name
|
||||||
|
self.ru_stime = 0.
|
||||||
|
self.ru_utime = 0.
|
||||||
|
self.db_txn_count = 0
|
||||||
|
self.db_txn_duration = 0.
|
||||||
|
self.usage_start = None
|
||||||
|
self.main_thread = threading.current_thread()
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "%s@%x" % (self.name, id(self))
|
return "%s@%x" % (self.name, id(self))
|
||||||
|
@ -56,12 +92,26 @@ class LoggingContext(object):
|
||||||
"""Get the current logging context from thread local storage"""
|
"""Get the current logging context from thread local storage"""
|
||||||
return getattr(cls.thread_local, "current_context", cls.sentinel)
|
return getattr(cls.thread_local, "current_context", cls.sentinel)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def set_current_context(cls, context):
|
||||||
|
"""Set the current logging context in thread local storage
|
||||||
|
Args:
|
||||||
|
context(LoggingContext): The context to activate.
|
||||||
|
Returns:
|
||||||
|
The context that was previously active
|
||||||
|
"""
|
||||||
|
current = cls.current_context()
|
||||||
|
if current is not context:
|
||||||
|
current.stop()
|
||||||
|
cls.thread_local.current_context = context
|
||||||
|
context.start()
|
||||||
|
return current
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
"""Enters this logging context into thread local storage"""
|
"""Enters this logging context into thread local storage"""
|
||||||
if self.parent_context is not None:
|
if self.parent_context is not None:
|
||||||
raise Exception("Attempt to enter logging context multiple times")
|
raise Exception("Attempt to enter logging context multiple times")
|
||||||
self.parent_context = self.current_context()
|
self.parent_context = self.set_current_context(self)
|
||||||
self.thread_local.current_context = self
|
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback):
|
def __exit__(self, type, value, traceback):
|
||||||
|
@ -70,16 +120,16 @@ class LoggingContext(object):
|
||||||
Returns:
|
Returns:
|
||||||
None to avoid suppressing any exeptions that were thrown.
|
None to avoid suppressing any exeptions that were thrown.
|
||||||
"""
|
"""
|
||||||
if self.thread_local.current_context is not self:
|
current = self.set_current_context(self.parent_context)
|
||||||
if self.thread_local.current_context is self.sentinel:
|
if current is not self:
|
||||||
|
if current is self.sentinel:
|
||||||
logger.debug("Expected logging context %s has been lost", self)
|
logger.debug("Expected logging context %s has been lost", self)
|
||||||
else:
|
else:
|
||||||
logger.warn(
|
logger.warn(
|
||||||
"Current logging context %s is not expected context %s",
|
"Current logging context %s is not expected context %s",
|
||||||
self.thread_local.current_context,
|
current,
|
||||||
self
|
self
|
||||||
)
|
)
|
||||||
self.thread_local.current_context = self.parent_context
|
|
||||||
self.parent_context = None
|
self.parent_context = None
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
|
@ -93,6 +143,43 @@ class LoggingContext(object):
|
||||||
for key, value in self.__dict__.items():
|
for key, value in self.__dict__.items():
|
||||||
setattr(record, key, value)
|
setattr(record, key, value)
|
||||||
|
|
||||||
|
record.ru_utime, record.ru_stime = self.get_resource_usage()
|
||||||
|
|
||||||
|
def start(self):
|
||||||
|
if threading.current_thread() is not self.main_thread:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.usage_start and self.usage_end:
|
||||||
|
self.ru_utime += self.usage_end.ru_utime - self.usage_start.ru_utime
|
||||||
|
self.ru_stime += self.usage_end.ru_stime - self.usage_start.ru_stime
|
||||||
|
self.usage_start = None
|
||||||
|
self.usage_end = None
|
||||||
|
|
||||||
|
if not self.usage_start:
|
||||||
|
self.usage_start = get_thread_resource_usage()
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
if threading.current_thread() is not self.main_thread:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.usage_start:
|
||||||
|
self.usage_end = get_thread_resource_usage()
|
||||||
|
|
||||||
|
def get_resource_usage(self):
|
||||||
|
ru_utime = self.ru_utime
|
||||||
|
ru_stime = self.ru_stime
|
||||||
|
|
||||||
|
if self.usage_start and threading.current_thread() is self.main_thread:
|
||||||
|
current = get_thread_resource_usage()
|
||||||
|
ru_utime += current.ru_utime - self.usage_start.ru_utime
|
||||||
|
ru_stime += current.ru_stime - self.usage_start.ru_stime
|
||||||
|
|
||||||
|
return ru_utime, ru_stime
|
||||||
|
|
||||||
|
def add_database_transaction(self, duration_ms):
|
||||||
|
self.db_txn_count += 1
|
||||||
|
self.db_txn_duration += duration_ms / 1000.
|
||||||
|
|
||||||
|
|
||||||
class LoggingContextFilter(logging.Filter):
|
class LoggingContextFilter(logging.Filter):
|
||||||
"""Logging filter that adds values from the current logging context to each
|
"""Logging filter that adds values from the current logging context to each
|
||||||
|
@ -121,17 +208,20 @@ class PreserveLoggingContext(object):
|
||||||
exited. Used to restore the context after a function using
|
exited. Used to restore the context after a function using
|
||||||
@defer.inlineCallbacks is resumed by a callback from the reactor."""
|
@defer.inlineCallbacks is resumed by a callback from the reactor."""
|
||||||
|
|
||||||
__slots__ = ["current_context"]
|
__slots__ = ["current_context", "new_context"]
|
||||||
|
|
||||||
|
def __init__(self, new_context=LoggingContext.sentinel):
|
||||||
|
self.new_context = new_context
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
"""Captures the current logging context"""
|
"""Captures the current logging context"""
|
||||||
self.current_context = LoggingContext.current_context()
|
self.current_context = LoggingContext.set_current_context(
|
||||||
LoggingContext.thread_local.current_context = LoggingContext.sentinel
|
self.new_context
|
||||||
|
)
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback):
|
def __exit__(self, type, value, traceback):
|
||||||
"""Restores the current logging context"""
|
"""Restores the current logging context"""
|
||||||
LoggingContext.thread_local.current_context = self.current_context
|
LoggingContext.set_current_context(self.current_context)
|
||||||
|
|
||||||
if self.current_context is not LoggingContext.sentinel:
|
if self.current_context is not LoggingContext.sentinel:
|
||||||
if self.current_context.parent_context is None:
|
if self.current_context.parent_context is None:
|
||||||
logger.warn(
|
logger.warn(
|
||||||
|
@ -164,8 +254,7 @@ class _PreservingContextDeferred(defer.Deferred):
|
||||||
|
|
||||||
def _wrap_callback(self, f):
|
def _wrap_callback(self, f):
|
||||||
def g(res, *args, **kwargs):
|
def g(res, *args, **kwargs):
|
||||||
with PreserveLoggingContext():
|
with PreserveLoggingContext(self._log_context):
|
||||||
LoggingContext.thread_local.current_context = self._log_context
|
|
||||||
res = f(res, *args, **kwargs)
|
res = f(res, *args, **kwargs)
|
||||||
return res
|
return res
|
||||||
return g
|
return g
|
||||||
|
|
|
@ -365,7 +365,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
|
||||||
# TODO(paul): This test will likely break if/when real auth permissions
|
# TODO(paul): This test will likely break if/when real auth permissions
|
||||||
# are added; for now the HS will always accept any invite
|
# are added; for now the HS will always accept any invite
|
||||||
|
|
||||||
yield self.handler.send_invite(
|
yield self.handler.send_presence_invite(
|
||||||
observer_user=self.u_apple, observed_user=self.u_banana)
|
observer_user=self.u_apple, observed_user=self.u_banana)
|
||||||
|
|
||||||
self.assertEquals(
|
self.assertEquals(
|
||||||
|
@ -384,7 +384,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def test_invite_local_nonexistant(self):
|
def test_invite_local_nonexistant(self):
|
||||||
yield self.handler.send_invite(
|
yield self.handler.send_presence_invite(
|
||||||
observer_user=self.u_apple, observed_user=self.u_durian)
|
observer_user=self.u_apple, observed_user=self.u_durian)
|
||||||
|
|
||||||
self.assertEquals(
|
self.assertEquals(
|
||||||
|
@ -414,7 +414,7 @@ class PresenceInvitesTestCase(PresenceTestCase):
|
||||||
defer.succeed((200, "OK"))
|
defer.succeed((200, "OK"))
|
||||||
)
|
)
|
||||||
|
|
||||||
yield self.handler.send_invite(
|
yield self.handler.send_presence_invite(
|
||||||
observer_user=self.u_apple, observed_user=u_rocket)
|
observer_user=self.u_apple, observed_user=u_rocket)
|
||||||
|
|
||||||
self.assertEquals(
|
self.assertEquals(
|
||||||
|
|
60
tests/util/test_snapshot_cache.py
Normal file
60
tests/util/test_snapshot_cache.py
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
from .. import unittest
|
||||||
|
|
||||||
|
from synapse.util.caches.snapshot_cache import SnapshotCache
|
||||||
|
from twisted.internet.defer import Deferred
|
||||||
|
|
||||||
|
class SnapshotCacheTestCase(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.cache = SnapshotCache()
|
||||||
|
self.cache.DURATION_MS = 1
|
||||||
|
|
||||||
|
def test_get_set(self):
|
||||||
|
# Check that getting a missing key returns None
|
||||||
|
self.assertEquals(self.cache.get(0, "key"), None)
|
||||||
|
|
||||||
|
# Check that setting a key with a deferred returns
|
||||||
|
# a deferred that resolves when the initial deferred does
|
||||||
|
d = Deferred()
|
||||||
|
set_result = self.cache.set(0, "key", d)
|
||||||
|
self.assertIsNotNone(set_result)
|
||||||
|
self.assertFalse(set_result.called)
|
||||||
|
|
||||||
|
# Check that getting the key before the deferred has resolved
|
||||||
|
# returns a deferred that resolves when the initial deferred does.
|
||||||
|
get_result_at_10 = self.cache.get(10, "key")
|
||||||
|
self.assertIsNotNone(get_result_at_10)
|
||||||
|
self.assertFalse(get_result_at_10.called)
|
||||||
|
|
||||||
|
# Check that the returned deferreds resolve when the initial deferred
|
||||||
|
# does.
|
||||||
|
d.callback("v")
|
||||||
|
self.assertTrue(set_result.called)
|
||||||
|
self.assertTrue(get_result_at_10.called)
|
||||||
|
|
||||||
|
# Check that getting the key after the deferred has resolved
|
||||||
|
# before the cache expires returns a resolved deferred.
|
||||||
|
get_result_at_11 = self.cache.get(11, "key")
|
||||||
|
self.assertIsNotNone(get_result_at_11)
|
||||||
|
self.assertTrue(get_result_at_11.called)
|
||||||
|
|
||||||
|
# Check that getting the key after the deferred has resolved
|
||||||
|
# after the cache expires returns None
|
||||||
|
get_result_at_12 = self.cache.get(12, "key")
|
||||||
|
self.assertIsNone(get_result_at_12)
|
|
@ -168,8 +168,9 @@ class MockHttpResource(HttpServer):
|
||||||
|
|
||||||
raise KeyError("No event can handle %s" % path)
|
raise KeyError("No event can handle %s" % path)
|
||||||
|
|
||||||
def register_path(self, method, path_pattern, callback):
|
def register_paths(self, method, path_patterns, callback):
|
||||||
self.callbacks.append((method, path_pattern, callback))
|
for path_pattern in path_patterns:
|
||||||
|
self.callbacks.append((method, path_pattern, callback))
|
||||||
|
|
||||||
|
|
||||||
class MockKey(object):
|
class MockKey(object):
|
||||||
|
|
3
tox.ini
3
tox.ini
|
@ -11,7 +11,8 @@ deps =
|
||||||
setenv =
|
setenv =
|
||||||
PYTHONDONTWRITEBYTECODE = no_byte_code
|
PYTHONDONTWRITEBYTECODE = no_byte_code
|
||||||
commands =
|
commands =
|
||||||
/bin/bash -c "coverage run --source=synapse {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
|
/bin/bash -c "coverage run {env:COVERAGE_OPTS:} --source={toxinidir}/synapse \
|
||||||
|
{envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
|
||||||
{env:DUMP_COVERAGE_COMMAND:coverage report -m}
|
{env:DUMP_COVERAGE_COMMAND:coverage report -m}
|
||||||
|
|
||||||
[testenv:packaging]
|
[testenv:packaging]
|
||||||
|
|
Loading…
Reference in a new issue