Dataset schema (each record below lists its fields in this column order):

| Column | Type | Values / lengths |
| --- | --- | --- |
| repo | string (categorical) | 856 distinct values |
| pull_number | int64 | 3 to 127k |
| instance_id | string | length 12 to 58 |
| issue_numbers | sequence | length 1 to 5 |
| base_commit | string | length 40 (git commit SHA) |
| patch | string | length 67 to 1.54M |
| test_patch | string | length 0 to 107M |
| problem_statement | string | length 3 to 307k |
| hints_text | string | length 0 to 908k |
| created_at | timestamp[s] | |
zulip/zulip
5,496
zulip__zulip-5496
[ "5431" ]
bd9459f27d67442343f8a2f6a1ed81e363c60c92
diff --git a/zproject/backends.py b/zproject/backends.py --- a/zproject/backends.py +++ b/zproject/backends.py @@ -440,6 +440,8 @@ def get_or_create_user(self, username, ldap_user): raise ZulipLDAPException("LDAP Authentication is not enabled") return user_profile, False except UserProfile.DoesNotExist: + if self._realm is None: + raise ZulipLDAPException("Realm is None") # No need to check for an inactive user since they don't exist yet if self._realm.deactivated: raise ZulipLDAPException("Realm has been deactivated")
diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -1776,8 +1776,8 @@ def test_login_failure_due_to_wrong_password(self): LDAP_APPEND_DOMAIN='zulip.com', AUTH_LDAP_BIND_PASSWORD='', AUTH_LDAP_USER_DN_TEMPLATE='uid=%(user)s,ou=users,dc=zulip,dc=com'): - user = self.backend.authenticate(self.example_email("hamlet"), 'wrong') - self.assertIs(user, None) + user = self.backend.authenticate(self.example_email("hamlet"), 'wrong') + self.assertIs(user, None) @override_settings(AUTHENTICATION_BACKENDS=('zproject.backends.ZulipLDAPAuthBackend',)) def test_login_failure_due_to_nonexistent_user(self): @@ -1791,8 +1791,8 @@ def test_login_failure_due_to_nonexistent_user(self): LDAP_APPEND_DOMAIN='zulip.com', AUTH_LDAP_BIND_PASSWORD='', AUTH_LDAP_USER_DN_TEMPLATE='uid=%(user)s,ou=users,dc=zulip,dc=com'): - user = self.backend.authenticate('[email protected]', 'testing') - self.assertIs(user, None) + user = self.backend.authenticate('[email protected]', 'testing') + self.assertIs(user, None) @override_settings(AUTHENTICATION_BACKENDS=('zproject.backends.ZulipLDAPAuthBackend',)) def test_ldap_permissions(self): @@ -1876,6 +1876,21 @@ class _LDAPUser(object): with self.assertRaisesRegex(Exception, 'Realm has been deactivated'): backend.get_or_create_user(email, _LDAPUser()) + @override_settings(AUTHENTICATION_BACKENDS=('zproject.backends.ZulipLDAPAuthBackend',)) + def test_get_or_create_user_when_realm_is_none(self): + # type: () -> None + class _LDAPUser(object): + attrs = {'fn': ['Full Name'], 'sn': ['Short Name']} + + ldap_user_attr_map = {'full_name': 'fn', 'short_name': 'sn'} + + with self.settings(AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map): + backend = self.backend + email = '[email protected]' + backend._realm = None + with self.assertRaisesRegex(Exception, 'Realm is None'): + backend.get_or_create_user(email, _LDAPUser()) + @override_settings(AUTHENTICATION_BACKENDS=('zproject.backends.ZulipLDAPAuthBackend',)) def test_django_to_ldap_username_when_domain_does_not_match(self): # type: () -> None
500 server error on LDAP login I'm not sure if this is related to server configuration, as I've not successfully gotten LDAP login to work yet. But `manage.py query_ldap` is fine, so I thought that should mean my Zulip configuration is correct. When I login using the user's email and pw, Zulip contacts the directory server but then I get an error page. If I provide an incorrect pw I get the expected message, so it appears to be authenticating. The server is Open Directory on OS X Server 10.11.4. It's not currently configured for SSL (cert issues.) ``` Jun 15 19:44:43 myserver kdc[5536]: AS-REQ [email protected] from 127.0.0.1:57971 for krbtgt/[email protected] Jun 15 19:44:43 myserver sandboxd[150] ([5536]): kdc(5536) deny file-read-data /private/etc/krb5.conf Jun 15 19:44:43 myserver kdc[5536]: AS-REQ [email protected] from 127.0.0.1:57971 for krbtgt/[email protected] Jun 15 19:44:43 myserver kdc[5536]: Client sent patypes: REQ-ENC-PA-REP Jun 15 19:44:43 myserver kdc[5536]: Need to use PA-ENC-TIMESTAMP/PA-PK-AS-REQ Jun 15 19:44:43 myserver kdc[5536]: AS-REQ [email protected] from 127.0.0.1:62639 for krbtgt/[email protected] Jun 15 19:44:43 --- last message repeated 1 time --- Jun 15 19:44:43 myserver kdc[5536]: Client sent patypes: ENC-TS, REQ-ENC-PA-REP Jun 15 19:44:43 myserver kdc[5536]: ENC-TS pre-authentication succeeded -- [email protected] Jun 15 19:44:43 myserver kdc[5536]: DSUpdateLoginStatus: Unable to synchronize login time for diradmin: 77009 Jun 15 19:44:43 myserver kdc[5536]: Client supported enctypes: aes256-cts-hmac-sha1-96, aes128-cts-hmac-sha1-96, des3-cbc-sha1, arcfour-hmac-md5, using aes256-cts-hmac-sha1-96/aes256-cts-hmac-sha1-96 Jun 15 19:44:43 myserver kdc[5536]: Requested flags: forwardable Jun 15 19:44:43 myserver kdc[5536]: AS-REQ [email protected] from 127.0.0.1:57431 for krbtgt/[email protected] Jun 15 19:44:43 --- last message repeated 1 time --- Jun 15 19:44:43 myserver kdc[5536]: Client sent patypes: REQ-ENC-PA-REP Jun 15 19:44:43 myserver kdc[5536]: Need to use PA-ENC-TIMESTAMP/PA-PK-AS-REQ Jun 15 19:44:43 myserver kdc[5536]: AS-REQ [email protected] from 127.0.0.1:58873 for krbtgt/[email protected] Jun 15 19:44:43 --- last message repeated 1 time --- Jun 15 19:44:43 myserver kdc[5536]: Client sent patypes: ENC-TS, REQ-ENC-PA-REP Jun 15 19:44:43 myserver kdc[5536]: ENC-TS pre-authentication succeeded -- [email protected] Jun 15 19:44:44 myserver kdc[5536]: DSUpdateLoginStatus: Unable to synchronize login time for zulipuser: 77009 Jun 15 19:44:44 myserver kdc[5536]: Client supported enctypes: aes256-cts-hmac-sha1-96, aes128-cts-hmac-sha1-96, des3-cbc-sha1, arcfour-hmac-md5, using aes256-cts-hmac-sha1-96/aes256-cts-hmac-sha1-96 Jun 15 19:44:44 myserver kdc[5536]: Requested flags: forwardable ``` ``` zulip@minmi:~/deployments/current$ ./manage.py query_ldap [email protected] full_name: [u'zulipuser'] ``` ``` # URI of your LDAP server. If set, LDAP is used to prepopulate a user's name in # Zulip. Example: "ldaps://ldap.example.com" AUTH_LDAP_SERVER_URI = "ldap://mydomain.com" # This DN will be used to bind to your server. If unset, anonymous # binds are performed. If set, you need to specify the password as # 'auth_ldap_bind_password' in zulip-secrets.conf. AUTH_LDAP_BIND_DN = "uid=diradmin,cn=users,dc=mydomain,dc=com" # Specify the search base and the property to filter on that corresponds to the # username. 
AUTH_LDAP_USER_SEARCH = LDAPSearch("cn=users,dc=mydomain,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)") # If the value of a user's "uid" (or similar) property is not their email # address, specify the domain to append here. LDAP_APPEND_DOMAIN = "mydomain.com" ``` ``` 2017-06-16 02:56:29,703 ERROR Internal Server Error: /accounts/login/ Traceback (most recent call last): File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/core/handlers/exception.py", line 42, in inner response = get_response(request) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/core/handlers/base.py", line 249, in _legacy_get_response response = self._get_response(request) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/core/handlers/base.py", line 187, in _get_response response = self.process_exception_by_middleware(e, request) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/core/handlers/base.py", line 185, in _get_response response = wrapped_callback(request, *callback_args, **callback_kwargs) File "./zerver/views/auth.py", line 445, in login_page extra_context=extra_context, **kwargs) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/contrib/auth/views.py", line 47, in inner return func(*args, **kwargs) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/views/decorators/debug.py", line 76, in sensitive_post_parameters_wrapper return view(request, *args, **kwargs) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/utils/decorators.py", line 149, in _wrapped_view response = view_func(request, *args, **kwargs) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/views/decorators/cache.py", line 57, in _wrapped_view_func response = view_func(request, *args, **kwargs) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/contrib/auth/views.py", line 81, in login if form.is_valid(): File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/forms/forms.py", line 169, in is_valid return self.is_bound and not self.errors File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/forms/forms.py", line 161, in errors self.full_clean() File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/forms/forms.py", line 371, in full_clean self._clean_form() File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/forms/forms.py", line 398, in _clean_form cleaned_data = self.clean() File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/contrib/auth/forms.py", line 191, in clean self.user_cache = authenticate(username=username, password=password) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django/contrib/auth/__init__.py", line 74, in authenticate user = backend.authenticate(**credentials) File "./zproject/backends.py", line 422, in authenticate user_profile = ZulipLDAPAuthBackendBase.authenticate(self, username, password) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django_auth_ldap/backend.py", line 169, in authenticate user = 
ldap_user.authenticate(password) File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django_auth_ldap/backend.py", line 344, in authenticate self._get_or_create_user() File "/home/zulip/deployments/2017-06-05-14-27-45/zulip-venv/lib/python2.7/site-packages/django_auth_ldap/backend.py", line 553, in _get_or_create_user self._user, created = self.backend.get_or_create_user(username, self) File "./zproject/backends.py", line 444, in get_or_create_user if self._realm.deactivated: AttributeError: 'NoneType' object has no attribute 'deactivated' ``` ``` Request info: - path: /accounts/login/ - POST: {u'username': [u'********************'], u'csrfmiddlewaretoken': [u'**********'], u'password': [u'**********'], u'button': [u'********************']} - REMOTE_ADDR: "[u'1.2.3.4']" - QUERY_STRING: "[u'next=']" - SERVER_NAME: "[u'']" ```
I haven't been able to get my OD server to use a real ssl cert, so I'm still configured with ldap://. I don't know if that is related to this exception. When I debug it, it's like my realm doesn't exist. If I comment out the offending lines in get_or_create_user, it just fails further on trying to get some other attribute of realm. This install was initially set up with email auth, then I configured LDAP. The login page has the correct realm name, and I can still login with the existing email auth user (if I leave email auth enabled.) Should I be able to add LDAP later? @umairwaheed FYI; I feel like even if this is configured wrong, we shouldn't be 500ing. Hello @zulip/server-authentication members, this issue was labeled with the **area: authentication** label, so you may want to check it out!
2017-06-21T09:14:44
zulip/zulip
5,499
zulip__zulip-5499
[ "5487" ]
e49afe3ebc2924b3cffa65a47c3b14cf24cbbf5c
diff --git a/zerver/lib/users.py b/zerver/lib/users.py --- a/zerver/lib/users.py +++ b/zerver/lib/users.py @@ -18,6 +18,13 @@ def check_full_name(full_name_raw): raise JsonableError(_("Invalid characters in name!")) return full_name +def check_short_name(short_name_raw): + # type: (Text) -> Text + short_name = short_name_raw.strip() + if len(short_name) == 0: + raise JsonableError(_("Bad name or username")) + return short_name + def check_change_full_name(user_profile, full_name_raw, acting_user): # type: (UserProfile, Text, UserProfile) -> Text """Verifies that the user's proposed full name is valid. The caller diff --git a/zerver/views/users.py b/zerver/views/users.py --- a/zerver/views/users.py +++ b/zerver/views/users.py @@ -23,7 +23,8 @@ from zerver.lib.streams import access_stream_by_name from zerver.lib.upload import upload_avatar_image from zerver.lib.validator import check_bool, check_string, check_int, check_url -from zerver.lib.users import check_valid_bot_type, check_change_full_name, check_full_name +from zerver.lib.users import check_valid_bot_type, check_change_full_name, \ + check_full_name, check_short_name from zerver.lib.utils import generate_random_token from zerver.models import UserProfile, Stream, Realm, Message, get_user_profile_by_email, \ email_allowed_for_realm, get_user_profile_by_id, get_user, Service @@ -239,13 +240,14 @@ def add_outgoing_webhook_service(name, user_profile, base_url, interface, token) token=token) @has_request_variables -def add_bot_backend(request, user_profile, full_name_raw=REQ("full_name"), short_name=REQ(), +def add_bot_backend(request, user_profile, full_name_raw=REQ("full_name"), short_name_raw=REQ("short_name"), bot_type=REQ(validator=check_int, default=UserProfile.DEFAULT_BOT), payload_url=REQ(validator=check_url, default=None), default_sending_stream_name=REQ('default_sending_stream', default=None), default_events_register_stream_name=REQ('default_events_register_stream', default=None), default_all_public_streams=REQ(validator=check_bool, default=None)): # type: (HttpRequest, UserProfile, Text, Text, int, Optional[Text], Optional[Text], Optional[Text], Optional[bool]) -> HttpResponse + short_name = check_short_name(short_name_raw) service_name = short_name short_name += "-bot" full_name = check_full_name(full_name_raw)
diff --git a/zerver/tests/test_bots.py b/zerver/tests/test_bots.py --- a/zerver/tests/test_bots.py +++ b/zerver/tests/test_bots.py @@ -62,6 +62,8 @@ def test_add_bot_with_bad_username(self): # type: () -> None self.login(self.example_email('hamlet')) self.assert_num_bots_equal(0) + + # Invalid username bot_info = dict( full_name='My bot name', short_name='@', @@ -70,6 +72,15 @@ def test_add_bot_with_bad_username(self): self.assert_json_error(result, 'Bad name or username') self.assert_num_bots_equal(0) + # Empty username + bot_info = dict( + full_name='My bot name', + short_name='', + ) + result = self.client_post("/json/bots", bot_info) + self.assert_json_error(result, 'Bad name or username') + self.assert_num_bots_equal(0) + def test_add_bot_with_no_name(self): # type: () -> None self.login(self.example_email('hamlet'))
The REST API allows creating bots with empty username # Description Using the `/bots` endpoint in the REST API, it's possible to create a bot with an empty username, which results in it having `-bot@<domain>` as the email address. # Steps to reproduce 1. Start the development server. 2. Get an API key for Iago, using the `/dev_fetch_api_key` endpoint: curl -X POST -F '[email protected]' \ http://localhost:9991/api/v1/dev_fetch_api_key 3. Send an HTTP POST request to the `/bots` endpoint, using Basic auth (`[email protected]:<api_key>`), and the following form-data content in the body: full_name: "Foo" short_name: "" For instance, using cURL: curl -X POST -u '[email protected]:<api_key>' \ -F 'full_name=Foo' -F 'short_name=' \ http://localhost:9991/api/v1/bots 4. The server will reply with an HTTP 200 status code (success). 5. The result is a bot with an empty `short_name`, that looks like this in the bot settings UI: ![Bot with empty username](https://user-images.githubusercontent.com/7356565/27352532-59a34794-5601-11e7-8427-b35e4fddfc06.png) Meanwhile, the server should reply with an HTTP 400 (bad request), and an error message indicating that the format is invalid: {"result":"error", "msg":"Bad name or username"} --- @zulipbot label "area: api" "bug"
Hello @zulip/server-api members, this issue was labeled with the **area: api** label, so you may want to check it out! @zulipbot claim
2017-06-21T12:41:05
zulip/zulip
5,615
zulip__zulip-5615
[ "5544" ]
e1ce3daaffc42a883d9520be381d21ef5262554a
diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -385,6 +385,12 @@ def custom_check_file(fn, rules, skip_rules=None, max_length=None): 'description': "Period should be part of the translatable string."}, {'pattern': "{{/tr}}[\.\?!]", 'description': "Period should be part of the translatable string."}, + {'pattern': "{{#tr.*}}.*{{.*{{/tr}}", + 'exclude_line': set([ + ('static/templates/subscription_settings.handlebars', + '{{#if subscribed }}{{#tr oneself }}Unsubscribe{{/tr}}{{else}}{{#tr oneself }}Subscribe{{/tr}}{{/if}}</button>'), + ]), + 'description': "Translated messages should not contain handlebars."}, ] jinja2_rules = html_rules + [ {'pattern': "{% endtrans %}[\.\?!]",
diff --git a/frontend_tests/node_tests/templates.js b/frontend_tests/node_tests/templates.js --- a/frontend_tests/node_tests/templates.js +++ b/frontend_tests/node_tests/templates.js @@ -635,6 +635,7 @@ function render(template_name, args) { content: 'This is message one.', last_edit_timestr: '11:00', starred: true, + starred_status: "Unstar", }, };
Translated messages in Handlebars templates should never contain handlebars When a Handlebars template contains something like this: ``` {{#tr this}}Your email is {{ page_params.email }}.{{/tr}} ``` it results in the inside expression (`{{ page_params.email }}`) being evaluated and substituted *first*, and only then the result being looked up for translation. That's rarely going to work, and we should just never do it. The correct way to write this, documented in [our frontend i18n docs](http://zulip.readthedocs.io/en/latest/translating.html#frontend-translations), is with a `__`-marked variable to be substituted by `i18next` itself: ``` {{#tr this}}Your email is __email__.{{/tr}} ``` (That or reword to separate the message from the variable, as say a label for a field; that'd be the right thing in this simple example.) There are a handful of cases of this today. We should eliminate them and enforce with a linter that we don't make more.
@umairwaheed can you take this one? Yeah. @zulipbot claim.
2017-06-29T06:46:22
zulip/zulip
5,631
zulip__zulip-5631
[ "5544" ]
2012913cc13332aa8c14825a042ea11b4b2cfa79
diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -385,12 +385,6 @@ def custom_check_file(fn, rules, skip_rules=None, max_length=None): 'description': "Period should be part of the translatable string."}, {'pattern': "{{/tr}}[\.\?!]", 'description': "Period should be part of the translatable string."}, - {'pattern': "{{#tr.*}}.*{{.*{{/tr}}", - 'exclude_line': set([ - ('static/templates/subscription_settings.handlebars', - '{{#if subscribed }}{{#tr oneself }}Unsubscribe{{/tr}}{{else}}{{#tr oneself }}Subscribe{{/tr}}{{/if}}</button>'), - ]), - 'description': "Translated messages should not contain handlebars."}, ] jinja2_rules = html_rules + [ {'pattern': "{% endtrans %}[\.\?!]",
Translated messages in Handlebars templates should never contain handlebars When a Handlebars template contains something like this: ``` {{#tr this}}Your email is {{ page_params.email }}.{{/tr}} ``` it results in the inside expression (`{{ page_params.email }}`) being evaluated and substituted *first*, and only then the result being looked up for translation. That's rarely going to work, and we should just never do it. The correct way to write this, documented in [our frontend i18n docs](http://zulip.readthedocs.io/en/latest/translating.html#frontend-translations), is with a `__`-marked variable to be substituted by `i18next` itself: ``` {{#tr this}}Your email is __email__.{{/tr}} ``` (That or reword to separate the message from the variable, as say a label for a field; that'd be the right thing in this simple example.) There are a handful of cases of this today. We should eliminate them and enforce with a linter that we don't make more.
@umairwaheed can you take this one? Yeah. @zulipbot claim. Thanks @umairwaheed ! This is a little trickier to deal with than that commit in #5615 will handle, though -- in particular because translated strings can be multi-line. For example, with `git grep -PA4 '\{\{\#tr[^{]*$'` I find this example in `static/templates/settings/bot-settings.handlebars`: ``` {{#tr this}} Looking for our <a href="/integrations" target="_blank">Integrations</a> or <a href="{{ server_uri }}/api" target="_blank">API</a> documentation? {{/tr}} ``` I'm also a bit concerned about the exception that had to be added in that commit -- there'll probably be other cases in the future where people have multiple small `{{#tr}}` blocks on the same line, and it's always awkward for a linter to fire a false positive because it's confusing and then it erodes people's trust in the linter. I think it's enough to have a rule that says, after a `{{#tr}}` block starts, the next handlebars have to be the close of that block. Here's one way to do that with a regex -- it's important that it runs across the whole file, not one line at a time: ``` $ git ls-files | grep handlebars$ | xargs perl -0ne 'if (m, \{\{\s*\#tr (?: [^{] | \{[^{] )* \{\{(?!/tr\}\}) .* ,x) { print "$ARGV:$.:\n"; print "$&\n\n"; }' static/templates/settings/bot-settings.handlebars:44: {{#tr this}} Looking for our <a href="/integrations" target="_blank">Integrations</a> or <a href="{{ server_uri }}/api" target="_blank">API</a> documentation? static/templates/single_message.handlebars:62: {{#tr this}}{{#if msg/starred}}Unstar{{else}}Star{{/if}} this message{{/tr}}"> ``` Both of those examples should be prohibited, so that's good. In Python, the `re.VERBOSE` aka `re.X` flag lets you write complex regexes with whitespace for clarity like I did above, or even with comments. We should be able to handle this somewhat gracefully using our template parser. Hmm, in that case our `tools/lint` cannot handle this. Like Steve said, I think we should handle this like we handle capitalization. Let me see what I can do.
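A rough Python translation of the perl one-liner above, assuming the check is run over whole-file contents rather than individual lines (the function and its output format are illustrative, not Zulip's actual linter API):

```python
import re

# Flag a {{#tr ...}} block whose body opens another handlebars expression
# before the closing {{/tr}}.  This has to scan full file contents, since
# translated strings can span multiple lines.
TR_CONTAINS_HANDLEBARS = re.compile(r"""
    \{\{ \s* \#tr \b          # start of a {{#tr ...}} block
    (?: [^{] | \{ [^{] )*     # anything that does not open a '{{'
    \{\{ (?! /tr\}\} )        # a '{{' that is not the closing {{/tr}}
""", re.VERBOSE)

def check_handlebars_file(path: str) -> bool:
    with open(path) as f:
        contents = f.read()
    match = TR_CONTAINS_HANDLEBARS.search(contents)
    if match:
        print("%s: translated block contains handlebars:\n%s" % (path, match.group(0)))
        return True
    return False
```

Because neither alternative inside the repeated group can consume a literal `{{`, the scan stops at the closing `{{/tr}}` of a clean block, so multiple small `{{#tr}}` blocks on one line no longer trigger a false positive.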
2017-06-30T05:12:42
zulip/zulip
5,866
zulip__zulip-5866
[ "5863" ]
9782911acaa45e39e06a8fa34fb5facc63c20a4f
diff --git a/tools/lib/template_parser.py b/tools/lib/template_parser.py --- a/tools/lib/template_parser.py +++ b/tools/lib/template_parser.py @@ -289,6 +289,7 @@ def is_django_block_tag(tag): 'blocktrans', 'trans', 'raw', + 'with', ] def get_handlebars_tag(text, i):
diff --git a/tools/tests/test_template_parser.py b/tools/tests/test_template_parser.py --- a/tools/tests/test_template_parser.py +++ b/tools/tests/test_template_parser.py @@ -77,6 +77,15 @@ def test_validate_django(self): ''' validate(text=my_html) + my_html = ''' + {% block "content" %} + {% with className="class" %} + {% include 'foobar' %} + {% endwith %} + {% endblock %} + ''' + validate(text=my_html) + def test_validate_no_start_tag(self): # type: () -> None my_html = '''
linting: Linter trips on "with" tags in Jinja. Currently the code below fails with the error: ``` templates | Traceback (most recent call last): templates | File "tools/check-templates", line 168, in <module> templates | check_our_files(args.modified, args.all_dups, args.targets) templates | File "tools/check-templates", line 36, in check_our_files templates | check_html_templates(by_lang['html'], args.all_dups) templates | File "tools/check-templates", line 115, in check_html_templates templates | validate(fn=fn, check_indent=(fn not in bad_files)) templates | File "/srv/zulip/tools/lib/template_parser.py", line 270, in validate templates | state.matcher(token) templates | File "/srv/zulip/tools/lib/template_parser.py", line 247, in f templates | ''' % (fn, problem, start_token.s, start_line, start_col, end_tag, end_line, end_col)) templates | lib.template_parser.TemplateParserException: templates | fn: templates/zerver/features.html templates | Mismatched tag. templates | start: templates | {% block portico_content %} templates | line 11, col 1 templates | end tag: templates | with templates | line 15, col 5 templates | ``` This seems to be because the linter doesn't understand the "with" and "endwith" tag being used in the templates. Here's the following code to trigger it: ```html {% block portico_content %} {% with className="white" %} {% include 'zerver/landing_nav.html' %} {% endwith %} {% endblock %} ```
Hello @zulip/server-tooling members, this issue was labeled with the **area: tooling** label, so you may want to check it out! @adnrs96 is this easy to fix? from the looks of it, feels like an easy fix. I am trying it out right now.
2017-07-19T19:13:12
zulip/zulip
6,230
zulip__zulip-6230
[ "6211" ]
2b4faaa847a7635b60233ca364a2dcac1caa637e
diff --git a/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py b/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py --- a/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py +++ b/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py @@ -2,7 +2,7 @@ from __future__ import print_function import os -import pathlib2 +import pathlib from typing import List @@ -16,7 +16,7 @@ def get_start_url(): start_file = os.path.join(dir_path, os.path.join(*[os.pardir] * 4), "docs/_build/html/index.html") return [ - pathlib2.Path(os.path.abspath(start_file)).as_uri() + pathlib.Path(os.path.abspath(start_file)).as_uri() ]
Remove unnecessary pathlib2 dependency We have pathlib2 and scandir as dependencies just because of the code here to construct a `file://` URL: `/tools/documentation_crawler/documentation_crawler/spiders/check_documentation.py` (That's run as part of `tools/test-documentation`). We should use some simpler mechanism in Python to do this and remove the unnecessary dependencies.
Hello @zulip/server-dependencies, @zulip/server-tooling members, this issue was labeled with the **area: dependencies**, **area: tooling** labels, so you may want to check it out! @rht FYI, this might be a good follow-up to your recent work. One particular advantage of fixing this is that `pathlib2` only depends on `scandir` for Python < 3.5. Which means depending which Python version you run `update-locked-requirements` with, it may or may not be included as a listed dependency. Since we support 3.4 and 3.5, we can defer dealing with the somewhat ugly work on having separate lock files for different Python versions if we just remove this. Also worth noting that we'll eventually want separate lock files per-version, but it might be years before we actually need that, so I'd prefer to just remove the unnecessary dependency instead.
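For what it's worth, the replacement needs nothing beyond the standard library: `pathlib` ships with Python 3.4+ and `Path.as_uri()` builds the `file://` URL directly. A minimal sketch (the path here is just an example):

```python
import os
import pathlib

# Build a file:// URL for the locally built docs index using only the
# standard library -- no pathlib2/scandir backports required.
start_file = os.path.join("docs", "_build", "html", "index.html")
start_url = pathlib.Path(os.path.abspath(start_file)).as_uri()
print(start_url)  # e.g. file:///home/user/zulip/docs/_build/html/index.html
```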
2017-08-23T09:37:15
zulip/zulip
6,272
zulip__zulip-6272
[ "6224" ]
ab61fe5922098b5c7dcd1921a11e18c837c8fc4a
diff --git a/zerver/lib/notifications.py b/zerver/lib/notifications.py --- a/zerver/lib/notifications.py +++ b/zerver/lib/notifications.py @@ -367,7 +367,11 @@ def handle_missedmessage_emails(user_profile_id, missed_email_events): messages_by_recipient_subject = defaultdict(list) # type: Dict[Tuple[int, Text], List[Message]] for msg in messages: - messages_by_recipient_subject[(msg.recipient_id, msg.topic_name())].append(msg) + if msg.recipient.type == Recipient.PERSONAL: + # For PM's group using (recipient, sender). + messages_by_recipient_subject[(msg.recipient_id, msg.sender_id)].append(msg) + else: + messages_by_recipient_subject[(msg.recipient_id, msg.topic_name())].append(msg) message_count_by_recipient_subject = { recipient_subject: len(msgs)
diff --git a/zerver/tests/test_notifications.py b/zerver/tests/test_notifications.py --- a/zerver/tests/test_notifications.py +++ b/zerver/tests/test_notifications.py @@ -352,3 +352,29 @@ def test_stream_link_in_missed_message(self, mock_random_token): body = '<a class="stream" data-stream-id="5" href="http://testserver/#narrow/stream/Verona">#Verona</a' subject = 'Othello, the Moor of Venice sent you a message' self._test_cases(tokens, msg_id, body, subject, send_as_user=False, verify_html_body=True) + + @patch('zerver.lib.email_mirror.generate_random_token') + def test_multiple_missed_personal_messages(self, mock_random_token): + # type: (MagicMock) -> None + tokens = self._get_tokens() + mock_random_token.side_effect = tokens + + hamlet = self.example_user('hamlet') + msg_id_1 = self.send_message(self.example_email('othello'), + hamlet.email, + Recipient.PERSONAL, + 'Personal Message 1') + msg_id_2 = self.send_message(self.example_email('iago'), + hamlet.email, + Recipient.PERSONAL, + 'Personal Message 2') + + handle_missedmessage_emails(hamlet.id, [ + {'message_id': msg_id_1}, + {'message_id': msg_id_2}, + ]) + self.assertEqual(len(mail.outbox), 2) + subject = 'Iago sent you a message' + self.assertEqual(mail.outbox[0].subject, subject) + subject = 'Othello, the Moor of Venice sent you a message' + self.assertEqual(mail.outbox[1].subject, subject)
Stop threading PM threads with multiple groups of users in a single email Occasionally, a missed_message email will have emails from multiple different PM senders with you as the recipient mixed together in a single email. This is a confusing user experience that might make users worry about security issues. I think the bug is that `handle_missedmessage_emails` groups messages by `(recipient_id, topic)` pairs, but PMs directly to you actually all have your user ID as the `recipient_id`. So I think we need to just make that grouping use e.g. the sender message ID as the second part of the tuple for PMs. It'd also be great to add a test for this sort of situation if we can do so easily.
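A minimal sketch of the grouping the issue proposes, with an illustrative `is_private` flag standing in for Zulip's real `Recipient.PERSONAL` check (the production code operates on Django `Message` objects inside `handle_missedmessage_emails`):

```python
from collections import defaultdict

def group_missed_messages(messages):
    """Group stream messages by (recipient, topic), but group personal
    messages by (recipient, sender) so that PMs from different senders
    never get threaded into the same missed-message email."""
    groups = defaultdict(list)
    for msg in messages:
        if msg.is_private:  # illustrative; Zulip checks msg.recipient.type == Recipient.PERSONAL
            key = (msg.recipient_id, msg.sender_id)
        else:
            key = (msg.recipient_id, msg.topic_name())
        groups[key].append(msg)
    return groups
```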
@zulipbot claim
2017-08-25T02:17:25
zulip/zulip
6,401
zulip__zulip-6401
[ "6398" ]
5045e53d63fed5ea32da3ba23f71380046e6fc0a
diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -135,6 +135,12 @@ def custom_check_file(fn, identifier, rules, color, skip_rules=None, max_length= 'description': 'The module blueslip has no function warning, try using blueslip.warn'}, {'pattern': '[)]{$', 'description': 'Missing space between ) and {'}, + {'pattern': 'i18n\.t\([^)]+[^,\{]$', + 'description': 'i18n string should not be a multiline string'}, + {'pattern': 'i18n\.t([^)]+?\+.+?)', + 'description': 'Do not concatenate i18n strings'}, + {'pattern': 'i18n\.t([^+]+?).+?\+', + 'description': 'Do not concatenate i18n strings'}, {'pattern': '["\']json/', 'description': 'Relative URL for JSON route not supported by i18n'}, # This rule is constructed with + to avoid triggering on itself diff --git a/zerver/management/commands/makemessages.py b/zerver/management/commands/makemessages.py --- a/zerver/management/commands/makemessages.py +++ b/zerver/management/commands/makemessages.py @@ -55,9 +55,9 @@ '{{\s*t "(.*?)"\W*}}', "{{\s*t '(.*?)'\W*}}", "i18n\.t\('([^\']*?)'\)", - "i18n\.t\('(.*?)',.*?[^,]\)", + "i18n\.t\('(.*?)',\s*.*?[^,]\)", 'i18n\.t\("([^\"]*?)"\)', - 'i18n\.t\("(.*?)",.*?[^,]\)', + 'i18n\.t\("(.*?)",\s*.*?[^,]\)', ] frontend_compiled_regexes = [re.compile(regex) for regex in regexes]
Some messages in JS code don't make it to the list to be translated I first noticed this with the "You subscribed to stream __stream__" message that appears between messages in a (public) stream/topic at the point where you subscribed. It's not in any `translations.json`, and consequently doesn't make it to Transifex and doesn't get translated. Looking at the source, the `i18n.t` call continues to the next line, so I suspect that's not a coincidence: ``` $ rg -A1 'You subscribed' static/js/ static/locale/ja/ static/js/message_list.js 360: return i18n.t("You subscribed to stream __stream__", 361- {stream: stream_name}); ``` There are a few other messages like that (`git grep 'i18n.t[^)]*$' static/js/`). Some work, like this one (actually this is also busted, in that we end up translating just the first part, but that's a different symptom): ``` $ rg -A1 'Upload would exceed' static/js static/locale/ja/ static/js/compose.js 769: msg = i18n.t("Upload would exceed your maximum quota." 770- + " Consider deleting some previously uploaded files."); static/locale/ja/LC_MESSAGES/django.po 2437:msgid "Upload would exceed your maximum quota." 2438-msgstr "" ``` But most of them don't. This is a bug somewhere in our makemessages chain -- or if it's hard to make `gettext` find these and we'd rather just not do them, then we should have a linter to prevent them.
Hello @zulip/server-i18n members, this issue was labeled with the **area: i18n** label, so you may want to check it out! Will check it out. @zulipbot, claim. Seems like the issue with our `makemessages` regexes.
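A quick way to reproduce the failure mode: Python's `.` never matches a newline by default, so the pre-fix extraction regex from `zerver/management/commands/makemessages.py` (quoted in the diff above) silently skips any `i18n.t` call whose arguments continue on the next line, while the patched version with `\s*` picks it up. The JS snippet below is the example from the issue:

```python
import re

# Extraction pattern before the fix, and the patched version with \s*
# allowing the second argument to start on a new line.
old = re.compile(r'i18n\.t\("(.*?)",.*?[^,]\)')
new = re.compile(r'i18n\.t\("(.*?)",\s*.*?[^,]\)')

call = ('return i18n.t("You subscribed to stream __stream__",\n'
        '    {stream: stream_name});')

print(old.search(call))  # None: '.' stops at the newline after the comma
print(new.search(call))  # a match object: the string now gets extracted
```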
2017-09-05T08:04:48
zulip/zulip
6,501
zulip__zulip-6501
[ "6320" ]
0da74f4d6da8d51d14d4ad24d8ea6f9ffa4ac650
diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -131,7 +131,9 @@ def build_custom_checkers(by_lang): 'description': 'Fix trailing whitespace'}, {'pattern': '^#+[A-Za-z0-9]', 'strip': '\n', - 'description': 'Missing space after # in heading'}, + 'description': 'Missing space after # in heading', + 'good_lines': ['### some heading', '# another heading'], + 'bad_lines': ['###some heading', '#another heading']}, ] # type: RuleList js_rules = cast(RuleList, [ {'pattern': '[^_]function\(',
diff --git a/tools/tests/test_linter_custom_check.py b/tools/tests/test_linter_custom_check.py --- a/tools/tests/test_linter_custom_check.py +++ b/tools/tests/test_linter_custom_check.py @@ -1,17 +1,18 @@ import os from itertools import chain -from mock import patch, MagicMock +from mock import patch from unittest import TestCase from typing import Any, Dict, List from tools.linter_lib.custom_check import build_custom_checkers +from tools.linter_lib.custom_check import custom_check_file ROOT_DIR = os.path.abspath(os.path.join(__file__, '..', '..', '..')) CHECK_MESSAGE = "Fix the corresponding rule in `tools/linter_lib/custom_check.py`." -class TestCustomRulesFormat(TestCase): +class TestCustomRules(TestCase): def setUp(self): # type: () -> None @@ -45,3 +46,22 @@ def test_paths_in_rules(self): if not os.path.splitext(path)[1]: self.assertTrue(path.endswith('/'), "The path '{}' should end with '/'. {}".format(path, CHECK_MESSAGE)) + + def test_rule_patterns(self): + # type: () -> None + """Verifies that the search regex specified in a custom rule actually matches + the expectation and doesn't throw false positives.""" + for rule in self.all_rules: + pattern = rule['pattern'] + for line in rule.get('good_lines', []): + # create=True is superfluous when mocking built-ins in Python >= 3.5 + with patch('builtins.open', return_value=iter((line+'\n\n').splitlines()), create=True, autospec=True): + self.assertFalse(custom_check_file('foo.bar', 'baz', [rule], ''), + "The pattern '{}' matched the line '{}' while it shouldn't.".format(pattern, line)) + + for line in rule.get('bad_lines', []): + # create=True is superfluous when mocking built-ins in Python >= 3.5 + with patch('builtins.open', + return_value=iter((line+'\n\n').splitlines()), create=True, autospec=True), patch('builtins.print'): + self.assertTrue(custom_check_file('foo.bar', 'baz', [rule], ''), + "The pattern '{}' didn't match the line '{}' while it should.".format(pattern, line))
Add test suite for regex linters Since we now have ~100 custom lint rules, it'd be good to have a test suite for them. I'm thinking a simple test suite where you can declare a list like this: ``` good_lines = [ ("foo.py", "foo = [] # type", "Error: Missing space before '#'" ``` where each tuple is a filename, line, and then some text in the expected error message. I imagine we'd start with something simple and small as part of `tools/test-tools` and then add things as we went. (Note I made up the error message in the example above; it's probably off from the real one.) The code is in `tools/linter_lib`; tagging this as bite size since I don't think one needs any special Zulip expertise to do this :).
Hello @zulip/server-tooling members, this issue was labeled with the **area: tooling** label, so you may want to check it out! @zulipbot claim Congratulations, @shailysangwan, you've successfully claimed your first issue! Contributing to Zulip will (hopefully) be a fun and rewarding experience, and I'm here to help get you off to a good start. First things first: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/index.html#code-docs), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Seeing this just made me realize the issue's similarity to #5406. I submitted #6472 for adding a test suite just like described here, with the difference that it doesn't test the rules themselves yet, only the validity of the filenames. Posting here to avoid any potential duplicate work. @shailysangwan @timabbott how do we want to proceed here? #6472 provides a test file, which is a great start. I think there are probably 2 things that need to happen here. First, we need to write a test system for the rule tests, which has some nice helper functions supporting tests like what I described above. Then, we can add a bunch of tests, and ideally some sort of meta-test that can be used eventually to check if we have "test coverage" of all the rules, if we can figure out a clean way to do that. Thinking about this a bit more, I think it's possible we want the system to be that you add a line in the actual rule definition like this for the "two spaces before comment" test (with a list of lines that pass/fail): `test_bad: ['x = 1 # comment']` `test_good: ['x = 1 # comment']` and the test system would confirm that the test fired that way on that particular rule (I think we can just assume the file matches the filter rules if it's being checked, and I don't think we want to test the descriptions, since that's basically just duplicate work). Or something along those lines. Should be able to cover 95% of the lint rules this way in a pretty compact fashion. @derAnfaenger since you have some experience with writing test frameworks at this point, maybe you should work on adding the framework, and then @shailysangwan can review and help with adding individual tests?
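A sketch of how the per-rule examples could be replayed, using the `good_lines`/`bad_lines` names that the diff above settles on; `re.search` here is a simplified stand-in for `custom_check_file`, which also honors options like `strip` and exclusion lists:

```python
import re

def check_rule_examples(rule):
    """The rule's pattern should hit every bad_line and miss every good_line."""
    pattern = re.compile(rule['pattern'])
    for line in rule.get('good_lines', []):
        assert not pattern.search(line), (rule['pattern'], line)
    for line in rule.get('bad_lines', []):
        assert pattern.search(line), (rule['pattern'], line)

# Example rule taken from the diff above:
check_rule_examples({
    'pattern': '^#+[A-Za-z0-9]',
    'description': 'Missing space after # in heading',
    'good_lines': ['### some heading', '# another heading'],
    'bad_lines': ['###some heading', '#another heading'],
})
```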
2017-09-13T16:12:02
zulip/zulip
6,979
zulip__zulip-6979
[ "6978" ]
4ab8337b2b8bd0acc7d910fcee5e6aa957b869e6
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1530,7 +1530,8 @@ def get_full_name_info(realm_id, full_names): } rows = UserProfile.objects.filter( - realm_id=realm_id + realm_id=realm_id, + is_active=True, ).filter( functools.reduce(lambda a, b: a | b, q_list), ).values(
diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -10,6 +10,7 @@ ) from zerver.lib.alert_words import alert_words_in_realm from zerver.lib.camo import get_camo_url +from zerver.lib.create_user import create_user from zerver.lib.emoji import get_emoji_url from zerver.lib.mention import possible_mentions from zerver.lib.message import render_markdown @@ -34,6 +35,7 @@ Realm, RealmFilter, Recipient, + UserProfile, ) import copy @@ -166,6 +168,39 @@ def bugdown_convert(text): # type: (Text) -> Text return bugdown.convert(text, message_realm=get_realm('zulip')) +class BugdownMiscTest(ZulipTestCase): + def test_get_full_name_info(self): + # type: () -> None + realm = get_realm('zulip') + + def make_user(email, full_name): + # type: (Text, Text) -> UserProfile + return create_user( + email=email, + password='whatever', + realm=realm, + full_name=full_name, + short_name='whatever', + ) + + fred1 = make_user('[email protected]', 'Fred Flintstone') + fred1.is_active = False + fred1.save() + + fred2 = make_user('[email protected]', 'Fred Flintstone') + + fred3 = make_user('[email protected]', 'Fred Flintstone') + fred3.is_active = False + fred3.save() + + dct = bugdown.get_full_name_info(realm.id, {'Fred Flintstone', 'cordelia LEAR', 'Not A User'}) + self.assertEqual(set(dct.keys()), {'fred flintstone', 'cordelia lear'}) + self.assertEqual(dct['fred flintstone'], dict( + email='[email protected]', + full_name='Fred Flintstone', + id=fred2.id + )) + class BugdownTest(ZulipTestCase): def load_bugdown_tests(self): # type: () -> Tuple[Dict[Text, Any], List[List[Text]]]
User mentions incorrectly consider deactivated users If there are two users with the same name, one of them deactivated, mentioning can end up targeting the wrong user. This seems to be a regression in `get_full_name_info`, which doesn't filter on `is_active=True`. @showell can you take this one? Seems like it should be quick to fix and test.
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out!
2017-10-12T22:15:38
zulip/zulip
7,419
zulip__zulip-7419
[ "165" ]
5bbc46762f52ccb18a0fc8837bce65a28602f97c
diff --git a/tools/lib/capitalization.py b/tools/lib/capitalization.py --- a/tools/lib/capitalization.py +++ b/tools/lib/capitalization.py @@ -20,6 +20,7 @@ r"Dropbox", r"GitHub", r"Google", + r"Hamlet", r"HTTP", r"ID", r"IDs", diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -503,7 +503,9 @@ def build_custom_checkers(by_lang): 'exclude_line': [('templates/zerver/register.html', 'placeholder="acme"'), ('templates/zerver/register.html', 'placeholder="Acme or Aκμή"'), ('static/templates/settings/realm-domains-modal.handlebars', - '<td><input type="text" class="new-realm-domain" placeholder="acme.com"></input></td>')], + '<td><input type="text" class="new-realm-domain" placeholder="acme.com"></input></td>'), + ("static/templates/user-groups-admin.handlebars", + '<input type="text" name="name" id="user_group_name" placeholder="hamletcharacters" />')], 'exclude': set(["static/templates/settings/emoji-settings-admin.handlebars", "static/templates/settings/realm-filter-settings-admin.handlebars", "static/templates/settings/bot-settings.handlebars", diff --git a/zproject/settings.py b/zproject/settings.py --- a/zproject/settings.py +++ b/zproject/settings.py @@ -1079,6 +1079,7 @@ def get_secret(key: str) -> None: 'js/settings_streams.js', 'js/settings_filters.js', 'js/settings_invites.js', + 'js/settings_user_groups.js', 'js/settings.js', 'js/admin_sections.js', 'js/admin.js',
diff --git a/frontend_tests/node_tests/templates.js b/frontend_tests/node_tests/templates.js --- a/frontend_tests/node_tests/templates.js +++ b/frontend_tests/node_tests/templates.js @@ -283,6 +283,31 @@ function render(template_name, args) { global.write_handlebars_output("admin_tab", html); }()); +(function admin_user_group_list() { + var args = { + user_group: { + id: "9", + name: "uranohoshi", + description: "Students at Uranohoshi Academy", + }, + }; + + var html = ''; + html += '<div id="user-groups">'; + html += render('admin_user_group_list', args); + html += '</div>'; + + global.write_handlebars_output('admin_user_group_list', html); + + var group_id = $(html).find('.user-group:first').prop('id'); + var group_name = $(html).find('.user-group:first .pill-container').attr('data-group-pills'); + var group_description = $(html).find('.user-group:first h4').text().trim().replace(/\s+/g, ' '); + + assert.equal(group_id, '9'); + assert.equal(group_name, 'uranohoshi'); + assert.equal(group_description, 'uranohoshi — Students at Uranohoshi Academy'); +}()); + (function admin_user_list() { var html = '<table>'; var users = ['alice', 'bob', 'carl']; diff --git a/frontend_tests/node_tests/user_groups.js b/frontend_tests/node_tests/user_groups.js --- a/frontend_tests/node_tests/user_groups.js +++ b/frontend_tests/node_tests/user_groups.js @@ -1,8 +1,18 @@ set_global('blueslip', {}); +set_global('page_params', {}); zrequire('user_groups'); (function test_user_groups() { + var students = { + name: 'Students', + id: 0, + }; + global.page_params.realm_user_groups = [students]; + + user_groups.initialize(); + assert.equal(user_groups.get_user_group_from_id(students.id), students); + var admins = { name: 'Admins', id: 1, @@ -11,14 +21,22 @@ zrequire('user_groups'); name: 'Everyone', id: 2, }; + user_groups.add(admins); assert.equal(user_groups.get_user_group_from_id(admins.id), admins); + var called = false; global.blueslip.error = function (msg) { assert.equal(msg, "Unknown group_id in get_user_group_from_id: " + all.id); called = true; }; + assert.equal(user_groups.get_user_group_from_id(all.id), undefined); assert(called); -}()); + user_groups.remove(students); + global.blueslip.error = function (msg) { + assert.equal(msg, "Unknown group_id in get_user_group_from_id: " + students.id); + }; + assert.equal(user_groups.get_user_group_from_id(students.id), undefined); +}());
Add user-definable groups [summarizing a Zulip discussion] Add a way for users (or, at the very least, administrators) to define their own groups, such that someone can write `@group1` to notify everyone in `group1`. Design complication: what should happen when `group1` has (say) 100 members, but someone says `@group1` in a stream to which only 30 of those members are subscribed? Should the sender get a prompt asking whether the other 70 people should be subscribed to the stream?
why not use a stream instead? whoever subscribed to it is in the group... @ritschwumm It could be helpful to tag a specific group in a discussion that's open to a wider audience. For example, you might want to tag the support team in a discussion among all company employees. As a workaround, you can simulate that today using the "alert words" feature -- just have all the members of a team all add `@group1` to their list of alert words.
2017-11-16T22:03:58
zulip/zulip
7,463
zulip__zulip-7463
[ "7417" ]
8f6b39a1c0ed96e523c71b2c8682731a2b652b28
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -34,6 +34,8 @@ extensions = [] # type: List[str] # Add any paths that contain templates here, relative to this directory. +# For more info about changes in _templates/layout.html, +# see https://github.com/zulip/zulip/pull/7463. templates_path = ['_templates'] # The encoding of source files. diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -489,7 +489,8 @@ def build_custom_checkers(by_lang): {'pattern': "aria-label='[^{]", 'description': "`aria-label` value should be translatable."}, {'pattern': 'aria-label="[^{]', - 'description': "`aria-label` value should be translatable."}, + 'description': "`aria-label` value should be translatable.", + 'exclude': set(["docs/_templates/layout.html"])}, {'pattern': 'script src="http', 'description': "Don't directly load dependencies from CDNs. See docs/subsystems/front-end-build-process.md"}, {'pattern': "title='[^{]",
Clean out articles not referenced in the developer documentation TOC This is a plan for how to clean up the remaining files in our ReadTheDocs that aren't properly managed in the TOC. We should probably just delete these: * /home/tabbott/zulip/docs/brief-install-vagrant-dev.md We should just add these to the TOC: * /home/tabbott/zulip/docs/code-of-conduct.md: maybe move it to contributing/ * /home/tabbott/zulip/docs/migration-renumbering.md: Move to subsystems/ * /home/tabbott/zulip/docs/production/email.md: Just add to production TOC * /home/tabbott/zulip/docs/production/ssl-certificates.md: Just add to production TOC * /home/tabbott/zulip/docs/subsystems/conversion.md: Just add to subsystems TOC * /home/tabbott/zulip/docs/subsystems/input-pills.md: Just add to subsystems TOC * /home/tabbott/zulip/docs/subsystems/unread_messages.md: Just add to subsystems TOC * /home/tabbott/zulip/docs/users.md: Move to subsystems and add to subsystems TOC These we probably actually just don't want in the TOC, and should figure out how to exclude from this warning: * /home/tabbott/zulip/docs/production/expensive-migrations.md: * /home/tabbott/zulip/docs/production/install-existing-server.md: * /home/tabbott/zulip/docs/production/multiple-organizations.md * /home/tabbott/zulip/docs/password-strength.md: Should move under production/ for better structure too. * /home/tabbott/zulip/docs/request-remote-dev.md: Move to development/request-remote.md first. Make sure to update zulip-gci repo too.
Hello @zulip/server-development members, this issue was labeled with the **area: documentation (developer)** label, so you may want to check it out! @zulipbot claim Hey @Balaji2198, I can jump in and help with the few remaining files if you don't mind. I helped with the recent docs migration so I am already familiar with the clean up that needs to be done here. Just to summarize, I think this is all that we have left to do: Move the following files and update broken links accordingly * docs/password-strength.md -> docs/production/password-strength.md Exclude the following files from warnings saying document's not included in TOC * docs/production/expensive-migrations.md * docs/production/install-existing-server.md * docs/production/multiple-organizations.md * docs/production/password-strength.md docs/development/request-remote.md Let's skip moving `code-of-conduct` for now. I think it's possible we might want to leave it (and possibly some some things currently in "overview" out to the top level, so they're extra visible. (Also, I think I moved request-remote) Okay, got it. So does that also mean you want `code-of-conduct.md` added to the TOC? Do you want it above or below "Overview" on the nav bar? I'm not sure; I was sorta thinking this would go with exploding "overview" to all be top-level (or mostly be top-level), so wanted to procrasinate a bit on figuring that out. Okay cool, yeah that would make sense. The red warning for code-of-conduct.md will remind us about it later :) Getting rid of the warnings while having the files not show up in the TOC is not very well documented in sphinx though. So I went ahead and made a PR that should complete this cleanup.
2017-11-21T00:39:02
zulip/zulip
7,569
zulip__zulip-7569
[ "7460" ]
16d8244c0a917bf10f4f376297107e50bd514d05
diff --git a/zerver/lib/notifications.py b/zerver/lib/notifications.py --- a/zerver/lib/notifications.py +++ b/zerver/lib/notifications.py @@ -366,7 +366,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile, 'from_address': from_address, 'reply_to_email': formataddr((reply_to_name, reply_to_address)), 'context': context} - queue_json_publish("missedmessage_email_senders", email_dict) + queue_json_publish("email_senders", email_dict) user_profile.last_reminder = timezone_now() user_profile.save(update_fields=['last_reminder']) diff --git a/zerver/signals.py b/zerver/signals.py --- a/zerver/signals.py +++ b/zerver/signals.py @@ -9,6 +9,7 @@ get_current_timezone_name as timezone_get_current_timezone_name from django.utils.timezone import now as timezone_now +from zerver.lib.queue import queue_json_publish from zerver.lib.send_email import FromAddress, send_email from zerver.models import UserProfile @@ -82,6 +83,10 @@ def email_on_new_login(sender: Any, user: UserProfile, request: Any, **kwargs: A context['device_info'] = device_info context['user'] = user - send_email('zerver/emails/notify_new_login', to_user_id=user.id, - from_name='Zulip Account Security', from_address=FromAddress.NOREPLY, - context=context) + email_dict = { + 'template_prefix': 'zerver/emails/notify_new_login', + 'to_user_id': user.id, + 'from_name': 'Zulip Account Security', + 'from_address': FromAddress.NOREPLY, + 'context': context} + queue_json_publish("email_senders", email_dict) diff --git a/zerver/worker/queue_processors.py b/zerver/worker/queue_processors.py --- a/zerver/worker/queue_processors.py +++ b/zerver/worker/queue_processors.py @@ -297,8 +297,8 @@ def consume_batch(self, missed_events: List[Dict[str, Any]]) -> None: for user_profile_id, events in by_recipient.items(): handle_missedmessage_emails(user_profile_id, events) -@assign_queue('missedmessage_email_senders') -class MissedMessageSendingWorker(QueueProcessingWorker): +@assign_queue('email_senders') +class EmailSendingWorker(QueueProcessingWorker): @retry_send_email_failures def consume(self, data): # type: (Dict[str, Any]) -> None @@ -308,6 +308,19 @@ def consume(self, data): # TODO: Do something smarter here .. pass +@assign_queue('missedmessage_email_senders') +class MissedMessageSendingWorker(EmailSendingWorker): + """ + Note: Class decorators are not inherited. + + The `missedmessage_email_senders` queue was used up through 1.7.1, so we + keep consuming from it in case we've just upgraded from an old version. + After the 1.8 release, we can delete it and tell admins to upgrade to 1.8 + first. + """ + # TODO: zulip-1.8: Delete code related to missedmessage_email_senders queue. + pass + @assign_queue('missedmessage_mobile_notifications') class PushNotificationsWorker(QueueProcessingWorker): def consume(self, data):
diff --git a/zerver/tests/test_queue_worker.py b/zerver/tests/test_queue_worker.py --- a/zerver/tests/test_queue_worker.py +++ b/zerver/tests/test_queue_worker.py @@ -17,6 +17,7 @@ from zerver.worker.queue_processors import ( get_active_worker_queues, QueueProcessingWorker, + EmailSendingWorker, LoopQueueProcessingWorker, MissedMessageWorker, ) @@ -171,7 +172,7 @@ def test_email_sending_worker_retries(self) -> None: fake_client = self.FakeClient() data = {'test': 'test', 'id': 'test_missed'} - fake_client.queue.append(('missedmessage_email_senders', data)) + fake_client.queue.append(('email_senders', data)) def fake_publish(queue_name: str, event: Dict[str, Any], @@ -179,7 +180,7 @@ def fake_publish(queue_name: str, fake_client.queue.append((queue_name, event)) with simulated_queue_client(lambda: fake_client): - worker = queue_processors.MissedMessageSendingWorker() + worker = queue_processors.EmailSendingWorker() worker.setup() with patch('zerver.worker.queue_processors.send_email_from_dict', side_effect=smtplib.SMTPServerDisconnected), \ @@ -332,6 +333,7 @@ def __init__(self) -> None: def test_get_active_worker_queues(self) -> None: worker_queue_count = (len(QueueProcessingWorker.__subclasses__()) + + len(EmailSendingWorker.__subclasses__()) + len(LoopQueueProcessingWorker.__subclasses__()) - 1) self.assertEqual(worker_queue_count, len(get_active_worker_queues())) self.assertEqual(1, len(get_active_worker_queues(queue_type='test')))
Use a queue with retry support for sending new-login emails We just got this exception (basically a transient network error trying to send email), which I think prevented the user in question from logging in: ``` Traceback (most recent call last): File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/core/handlers/exception.py", line 41, in inner response = get_response(request) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/core/handlers/base.py", line 187, in _get_response response = self.process_exception_by_middleware(e, request) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/core/handlers/base.py", line 185, in _get_response response = wrapped_callback(request, *callback_args, **callback_kwargs) File "./zerver/views/auth.py", line 550, in login_page extra_context=extra_context, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/contrib/auth/views.py", line 54, in inner return func(*args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/contrib/auth/views.py", line 150, in login )(request) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/views/generic/base.py", line 68, in view return self.dispatch(request, *args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 67, in _wrapper return bound_func(*args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/views/decorators/debug.py", line 76, in sensitive_post_parameters_wrapper return view(request, *args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 63, in bound_func return func.__get__(self, type(self))(*args2, **kwargs2) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 67, in _wrapper return bound_func(*args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 149, in _wrapped_view response = view_func(request, *args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 63, in bound_func return func.__get__(self, type(self))(*args2, **kwargs2) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 67, in _wrapper return bound_func(*args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/views/decorators/cache.py", line 57, in _wrapped_view_func response = view_func(request, *args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/utils/decorators.py", line 63, in bound_func return func.__get__(self, type(self))(*args2, **kwargs2) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/contrib/auth/views.py", line 90, in dispatch return super(LoginView, self).dispatch(request, *args, **kwargs) File 
"/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/views/generic/base.py", line 88, in dispatch return handler(request, *args, **kwargs) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/views/generic/edit.py", line 183, in post return self.form_valid(form) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/contrib/auth/views.py", line 119, in form_valid auth_login(self.request, form.get_user()) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/contrib/auth/__init__.py", line 160, in login user_logged_in.send(sender=user.__class__, request=request, user=user) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/dispatch/dispatcher.py", line 193, in send for receiver in self._live_receivers(sender) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/dispatch/dispatcher.py", line 193, in <listcomp> for receiver in self._live_receivers(sender) File "./zerver/signals.py", line 91, in email_on_new_login context=context) File "./zerver/lib/send_email.py", line 92, in send_email if mail.send() == 0: File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/core/mail/message.py", line 348, in send return self.get_connection(fail_silently).send_messages([self]) File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/core/mail/backends/smtp.py", line 104, in send_messages new_conn_created = self.open() File "/home/zulip/deployments/2017-11-17-20-18-14/zulip-py3-venv/lib/python3.5/site-packages/django/core/mail/backends/smtp.py", line 64, in open self.connection = self.connection_class(self.host, self.port, **connection_params) File "/usr/lib/python3.5/smtplib.py", line 251, in __init__ (code, msg) = self.connect(host, port) File "/usr/lib/python3.5/smtplib.py", line 337, in connect (code, msg) = self.getreply() File "/usr/lib/python3.5/smtplib.py", line 393, in getreply raise SMTPServerDisconnected("Connection unexpectedly closed") smtplib.SMTPServerDisconnected: Connection unexpectedly closed ``` I think the right fix for this is to just move new-login emails to be sent using a queue, like many of our other emails are. Ideally, we'd move this into an existing queue. E.g. possibly `missedmessage_email_senders` should be turned into a more generic `email_senders` worker that several things push emails into.
Hello @zulip/server-notifications, @zulip/server-production members, this issue was labeled with the **area: notifications (other)**, **area: production** labels, so you may want to check it out! @umairwaheed can you take this one? Might also be worth reading the last notes on https://github.com/zulip/zulip/issues/6542#issuecomment-347033920 in case it makes sense to fix coverage there as well. Sure. @zulipbot claim.
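To make the queue-with-retry idea above concrete, here is a minimal, self-contained sketch of a retrying email-sending worker. It deliberately does not use Zulip's real queue_processors API or RabbitMQ client; the payload shape, retry limit, and backoff delays are illustrative assumptions only.

```python
# Illustrative only: a stand-alone retrying email worker, not Zulip's actual
# queue_processors implementation.  Payload keys and retry policy are assumptions.
import queue
import smtplib
import time
from email.message import EmailMessage
from typing import Any, Dict

MAX_ATTEMPTS = 3
RETRY_DELAY_SECONDS = [1, 5, 25]  # simple escalating backoff

def build_message(event: Dict[str, Any]) -> EmailMessage:
    msg = EmailMessage()
    msg["From"] = event["from_address"]
    msg["To"] = event["to_address"]
    msg["Subject"] = event["subject"]
    msg.set_content(event["body"])
    return msg

def send_with_retry(event: Dict[str, Any]) -> None:
    for attempt in range(MAX_ATTEMPTS):
        try:
            with smtplib.SMTP(event["smtp_host"], event.get("smtp_port", 25)) as conn:
                conn.send_message(build_message(event))
            return
        except smtplib.SMTPException:
            # Transient failure (e.g. SMTPServerDisconnected): wait and retry
            # instead of failing the request that enqueued the email.
            if attempt == MAX_ATTEMPTS - 1:
                raise
            time.sleep(RETRY_DELAY_SECONDS[attempt])

def worker_loop(email_queue: "queue.Queue[Dict[str, Any]]") -> None:
    while True:
        event = email_queue.get()  # blocks until an email event is enqueued
        try:
            send_with_retry(event)
        finally:
            email_queue.task_done()
```

The point is that the login request only enqueues the event; a transient SMTP failure gets retried by the worker instead of turning into a 500 for the user who is trying to log in.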
2017-11-29T08:16:48
zulip/zulip
7,580
zulip__zulip-7580
[ "7563" ]
70681628caa40fac72e0378322afb02f4d7c735a
diff --git a/zerver/worker/queue_processors.py b/zerver/worker/queue_processors.py --- a/zerver/worker/queue_processors.py +++ b/zerver/worker/queue_processors.py @@ -59,6 +59,7 @@ import re import importlib +logger = logging.getLogger(__name__) class WorkerDeclarationException(Exception): pass @@ -232,7 +233,7 @@ def consume(self, data): return referrer = get_user_profile_by_id(data["referrer_id"]) - logging.info("Sending invitation for realm %s to %s" % (referrer.realm.string_id, invitee.email)) + logger.info("Sending invitation for realm %s to %s" % (referrer.realm.string_id, invitee.email)) do_send_confirmation_email(invitee, referrer) # queue invitation reminder for two days from now.
diff --git a/zproject/test_settings.py b/zproject/test_settings.py --- a/zproject/test_settings.py +++ b/zproject/test_settings.py @@ -124,6 +124,7 @@ def set_loglevel(logger_name, level) -> None: set_loglevel('zulip.send_email', 'ERROR') set_loglevel('zerver.lib.digest', 'ERROR') set_loglevel('zerver.lib.email_mirror', 'ERROR') + set_loglevel('zerver.worker.queue_processors', 'WARNING') # Enable file:/// hyperlink support by default in tests ENABLE_FILE_LINKS = True
Eliminate "Sending invitation" output spam in test_signup

We're getting close to eliminating output spam in our tests! One of the remaining cases is in `tools/test-backend zerver.tests.test_signup`, which produces a bunch of lines like this:

```
2017-11-29 01:22:45.669 INFO [] Sending invitation for realm zulip to [email protected]
```

We should work out where those lines are coming from and clean them up.
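The usual shape of this kind of fix (and roughly what the patch above does) is to emit the message through a module-level logger rather than the root logger, so the test settings can raise just that logger's level. A generic sketch; the logger name comes from the patch above, everything else is illustrative:

```python
# Illustrative sketch.  In the module that emits the message (here, a queue
# worker), log through a named module-level logger rather than the root logger.
import logging

logger = logging.getLogger(__name__)

def notify_invitation(realm: str, email: str) -> None:
    logger.info("Sending invitation for realm %s to %s", realm, email)

# In the test settings, quiet just that one logger; WARNING hides the INFO
# lines above without muting anything else.
logging.getLogger("zerver.worker.queue_processors").setLevel(logging.WARNING)
```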
Hello @zulip/server-testing members, this issue was labeled with the **area: testing-coverage** label, so you may want to check it out! (See #1587 for locking this kind of problem down entirely in the future.) @zulipbot claim
2017-11-30T00:42:18
zulip/zulip
7,643
zulip__zulip-7643
[ "5159" ]
ff435439af091799cbf436f66de92d21c54176d1
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1078,6 +1078,68 @@ def run(self, lines: List[Text]) -> List[Text]: inserts += 1 return copy +class AutoNumberOListPreprocessor(markdown.preprocessors.Preprocessor): + """ Finds a sequence of lines numbered by the same number""" + RE = re.compile(r'^([ ]*)(\d+)\.[ ]+(.*)') + TAB_LENGTH = 2 + + def run(self, lines): + # type: (List[Text]) -> List[Text] + new_lines = [] # type: List[Text] + current_list = [] # type: List[Match[Text]] + current_indent = 0 + + for line in lines: + m = self.RE.match(line) + + # Remember if this line is a continuation of already started list + is_next_item = (m and current_list + and current_indent == len(m.group(1)) // self.TAB_LENGTH) + + if not is_next_item: + # There is no more items in the list we were processing + new_lines.extend(self.renumber(current_list)) + current_list = [] + + if not m: + # Ordinary line + new_lines.append(line) + elif is_next_item: + # Another list item + current_list.append(m) + else: + # First list item + current_list = [m] + current_indent = len(m.group(1)) // self.TAB_LENGTH + + new_lines.extend(self.renumber(current_list)) + + return new_lines + + def renumber(self, mlist): + # type: (List[Match[Text]]) -> List[Text] + if not mlist: + return [] + + start_number = int(mlist[0].group(2)) + + # Change numbers only if every one is the same + change_numbers = True + for m in mlist: + if int(m.group(2)) != start_number: + change_numbers = False + break + + lines = [] # type: List[Text] + counter = start_number + + for m in mlist: + number = str(counter) if change_numbers else m.group(2) + lines.append('%s%s. %s' % (m.group(1), number, m.group(3))) + counter += 1 + + return lines + # Based on markdown.inlinepatterns.LinkPattern class LinkPattern(markdown.inlinepatterns.Pattern): """ Return a link element from the given match. """ @@ -1397,6 +1459,10 @@ def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> N BugdownUListPreprocessor(md), "_begin") + md.preprocessors.add('auto_number_olist', + AutoNumberOListPreprocessor(md), + "_begin") + md.treeprocessors.add("inline_interesting_links", InlineInterestingLinkProcessor(md, self), "_end") if settings.CAMO_URI:
diff --git a/zerver/fixtures/markdown_test_cases.json b/zerver/fixtures/markdown_test_cases.json --- a/zerver/fixtures/markdown_test_cases.json +++ b/zerver/fixtures/markdown_test_cases.json @@ -217,6 +217,24 @@ "expected_output": "<p>1. A<br>\n 2. B</p>", "text_content": "1. A\n 2. B" }, + { + "name": "auto_renumbered_list", + "input": "1. A\n1. B\n 1. C\n1. D", + "expected_output": "<p>1. A<br>\n2. B<br>\n 3. C<br>\n4. D</p>", + "text_content": "1. A\n2. B\n 3. C\n4. D" + }, + { + "name": "auto_renumbered_list_from", + "input": "3. A\n3. B\n3. C\n3. D", + "expected_output": "<p>3. A<br>\n4. B<br>\n5. C<br>\n6. D</p>", + "text_content": "3. A\n4. B\n5. C\n6. D" + }, + { + "name": "not_auto_renumbered_list", + "input": "1. A\n3. B\n 2. C\n1. D", + "expected_output": "<p>1. A<br>\n3. B<br>\n 2. C<br>\n1. D</p>", + "text_content": "1. A\n3. B\n 2. C\n1. D" + }, { "name": "linkify_interference", "input": "link: xx, x xxxxx xx xxxx xx\n\n[xxxxx #xx](http://xxxxxxxxx:xxxx/xxx/xxxxxx%xxxxxx/xx/):**xxxxxxx**\n\nxxxxxxx xxxxx xxxx xxxxx:\n`xxxxxx`: xxxxxxx\n`xxxxxx`: xxxxx\n`xxxxxx`: xxxxx xxxxx",
Add limited ordered list support to our markdown

Markdown has this cool ordered list feature where you can do this:

```
1. first item
1. second
1. third
```

And it'll auto-renumber them for you to 1/2/3. We can't use existing Markdown ordered-list implementations, because they will also turn a solo `2.` into a `1.`, which really sucks if you're splitting a list across multiple messages. But I think we could do something where, if several consecutive items repeat the same number, we do turn them into an ordered list, counting from that number.

https://zulip.readthedocs.io/en/latest/markdown.html is a helpful resource for anyone interested in working on this.
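A rough, engine-independent sketch of the proposed rule: renumber a run of items only when every item repeats the same starting number, and otherwise leave the author's numbering untouched. Indentation handling and the actual Bugdown preprocessor wiring are intentionally left out.

```python
import re
from typing import List

ITEM_RE = re.compile(r"^(\d+)\.\s+(.*)$")

def renumber(lines: List[str]) -> List[str]:
    """Renumber '1. a / 1. b / 1. c' to 1/2/3, counting from the first number.

    If the items do not all share the same number (e.g. someone is continuing
    a list from a previous message), the lines are returned unchanged.
    """
    matches = [ITEM_RE.match(line) for line in lines]
    if not all(matches):
        return lines
    numbers = [int(m.group(1)) for m in matches]
    if len(set(numbers)) != 1:
        return lines  # mixed numbers: keep the author's numbering
    start = numbers[0]
    return ["%d. %s" % (start + i, m.group(2)) for i, m in enumerate(matches)]

print(renumber(["1. first item", "1. second", "1. third"]))
# ['1. first item', '2. second', '3. third']
print(renumber(["2. solo item"]))
# ['2. solo item']  -- a lone '2.' is never rewritten to '1.'
```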
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! @zulipbot claim Welcome to Zulip, @wjkg! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/index.html#code-contribution-guide), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Hello @wjkg, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 7 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 3 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! I'm still working on this issue.
2017-12-03T14:55:43
zulip/zulip
7,694
zulip__zulip-7694
[ "7557" ]
6e85122724fb0980fb7a5b1721f8ec5ab8d91e77
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -66,7 +66,7 @@ UserActivityInterval, active_user_ids, get_active_streams, \ realm_filters_for_realm, RealmFilter, \ get_owned_bot_dicts, stream_name_in_use, \ - get_old_unclaimed_attachments, get_cross_realm_emails, \ + get_old_unclaimed_attachments, is_cross_realm_bot_email, \ Reaction, EmailChangeStatus, CustomProfileField, \ custom_profile_fields_for_realm, get_huddle_user_ids, \ CustomProfileFieldValue, validate_attachment_request, get_system_bot, \ @@ -1531,8 +1531,7 @@ def validate_recipient_user_profiles(user_profiles: List[UserProfile], # We exempt cross-realm bots from the check that all the recipients # are in the same realm. realms = set() - exempt_emails = get_cross_realm_emails() - if sender.email not in exempt_emails: + if not is_cross_realm_bot_email(sender.email): realms.add(sender.realm_id) for user_profile in user_profiles: @@ -1540,7 +1539,7 @@ def validate_recipient_user_profiles(user_profiles: List[UserProfile], user_profile.realm.deactivated: raise ValidationError(_("'%s' is no longer using Zulip.") % (user_profile.email,)) recipient_profile_ids.add(user_profile.id) - if user_profile.email not in exempt_emails: + if not is_cross_realm_bot_email(user_profile.email): realms.add(user_profile.realm_id) if len(realms) > 1: @@ -1892,8 +1891,7 @@ def internal_send_message(realm, sender_email, recipient_type_name, recipients, system bot.""" # Verify the user is in fact a system bot - assert(sender_email.lower() in settings.CROSS_REALM_BOT_EMAILS or - sender_email == settings.ERROR_BOT) + assert(is_cross_realm_bot_email(sender_email) or sender_email == settings.ERROR_BOT) sender = get_system_bot(sender_email) parsed_recipients = extract_recipients(recipients) @@ -3843,7 +3841,7 @@ def get_status_dict(requesting_user_profile: UserProfile) -> Dict[Text, Dict[Tex return UserPresence.get_status_dict_by_realm(requesting_user_profile.realm_id) def get_cross_realm_dicts() -> List[Dict[str, Any]]: - users = bulk_get_users(list(get_cross_realm_emails()), None, + users = bulk_get_users(list(settings.CROSS_REALM_BOT_EMAILS), None, base_query=UserProfile.objects.filter( realm__string_id=settings.SYSTEM_BOT_REALM)).values() return [{'email': user.email, @@ -3874,7 +3872,7 @@ def do_send_confirmation_email(invitee: PreregistrationUser, from_address=FromAddress.NOREPLY, context=context) def email_not_system_bot(email: Text) -> None: - if email.lower() in settings.CROSS_REALM_BOT_EMAILS: + if is_cross_realm_bot_email(email): raise ValidationError('%s is an email address reserved for system bots' % (email,)) def validate_email_for_realm(target_realm: Realm, email: Text) -> None: diff --git a/zerver/models.py b/zerver/models.py --- a/zerver/models.py +++ b/zerver/models.py @@ -1404,7 +1404,7 @@ def get_user(email: Text, realm: Realm) -> UserProfile: return UserProfile.objects.select_related().get(email__iexact=email.strip(), realm=realm) def get_user_including_cross_realm(email: Text, realm: Optional[Realm]=None) -> UserProfile: - if email in get_cross_realm_emails(): + if is_cross_realm_bot_email(email): return get_system_bot(email) assert realm is not None return get_user(email, realm) @@ -1455,8 +1455,8 @@ def get_owned_bot_dicts(user_profile: UserProfile, } for botdict in result] -def get_cross_realm_emails() -> Set[Text]: - return set(settings.CROSS_REALM_BOT_EMAILS) +def is_cross_realm_bot_email(email: Text) -> bool: + return email.lower() in 
settings.CROSS_REALM_BOT_EMAILS # The Huddle class represents a group of individuals who have had a # Group Private Message conversation together. The actual membership
diff --git a/zerver/tests/test_bots.py b/zerver/tests/test_bots.py --- a/zerver/tests/test_bots.py +++ b/zerver/tests/test_bots.py @@ -11,7 +11,8 @@ from zerver.lib.actions import do_change_stream_invite_only from zerver.models import get_realm, get_stream, \ - Realm, Stream, UserProfile, get_user, get_bot_services, Service + Realm, Stream, UserProfile, get_user, get_bot_services, Service, \ + is_cross_realm_bot_email from zerver.lib.test_classes import ZulipTestCase, UploadSerializeMixin from zerver.lib.test_helpers import ( avatar_disk_path, @@ -988,3 +989,12 @@ def test_create_embedded_bot(self, **extras: Any) -> None: bot_info.update(extras) result = self.client_post("/json/bots", bot_info) self.assert_json_error(result, 'Invalid embedded bot name.') + + def test_is_cross_realm_bot_email(self) -> None: + self.assertTrue(is_cross_realm_bot_email("[email protected]")) + self.assertTrue(is_cross_realm_bot_email("[email protected]")) + self.assertFalse(is_cross_realm_bot_email("[email protected]")) + + with self.settings(CROSS_REALM_BOT_EMAILS={"[email protected]"}): + self.assertTrue(is_cross_realm_bot_email("[email protected]")) + self.assertFalse(is_cross_realm_bot_email("[email protected]"))
Clean up get_cross_realm_emails vs. CROSS_REALM_BOT_EMAILS We have (at least) two ways of getting the set of cross-realm bots: * `zerver.models.get_cross_realm_emails` is used in `zerver/lib/actions.py` and `zerver/models.py` * `settings.CROSS_REALM_BOT_EMAILS` is used directly in `zerver/lib/actions.py`, plus used to implement `get_cross_realm_emails` Moreover, in some places we explicitly lower-case the email address before looking for it in that set, and in others we don't. There should be just one way we do this -- probably a function like `is_cross_realm_bot_email`. That would help make sure we handle things in a uniform way with respect to casing, and also help a reader be confident there isn't a subtle distinction they're missing. To do this, I'd do something like: * Write an `is_cross_realm_bot_email`, next to `get_cross_realm_emails`. Write tests, being sure to test the case handling. * Change all the users of `get_cross_realm_emails` and `CROSS_REALM_BOT_EMAILS` to use that instead, where applicable. * I think the only exception is `get_cross_realm_dicts`, which actually wants the list rather than checking membership. This can use `CROSS_REALM_BOT_EMAILS` directly. * Delete `get_cross_realm_emails`.
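For reference, the single helper can be as small as the sketch below. The hard-coded bot list stands in for `settings.CROSS_REALM_BOT_EMAILS` so the snippet runs on its own; the one behavioral decision it bakes in is that the comparison is always case-insensitive.

```python
from typing import FrozenSet

# Stand-in for settings.CROSS_REALM_BOT_EMAILS; the real values live in settings.
CROSS_REALM_BOT_EMAILS: FrozenSet[str] = frozenset({
    "[email protected]",
    "[email protected]",
    "[email protected]",
})

def is_cross_realm_bot_email(email: str) -> bool:
    # Lower-case exactly once, here, so callers cannot disagree about casing.
    return email.lower() in CROSS_REALM_BOT_EMAILS

assert is_cross_realm_bot_email("[email protected]")
assert not is_cross_realm_bot_email("[email protected]")
```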
Hello @zulip/server-refactoring members, this issue was labeled with the **area: refactoring** label, so you may want to check it out! @zulipbot claim
2017-12-07T20:42:27
zulip/zulip
7,760
zulip__zulip-7760
[ "4507" ]
a05631d8c6dd665472460f056141f1a31914c183
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -355,6 +355,24 @@ def run(self, root: Element) -> None: continue img.set("src", get_camo_url(url)) +class BacktickPattern(markdown.inlinepatterns.Pattern): + """ Return a `<code>` element containing the matching text. """ + def __init__(self, pattern): + # type: (Text) -> None + markdown.inlinepatterns.Pattern.__init__(self, pattern) + self.ESCAPED_BSLASH = '%s%s%s' % (markdown.util.STX, ord('\\'), markdown.util.ETX) + self.tag = 'code' + + def handleMatch(self, m): + # type: (Match[Text]) -> Union[Text, Element] + if m.group(4): + el = markdown.util.etree.Element(self.tag) + # Modified to not strip whitespace + el.text = markdown.util.AtomicString(m.group(4)) + return el + else: + return m.group(2).replace('\\\\', self.ESCAPED_BSLASH) + class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor): TWITTER_MAX_IMAGE_HEIGHT = 400 TWITTER_MAX_TO_PREVIEW = 3 @@ -1346,7 +1364,7 @@ def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> N for k in ('image_link', 'image_reference', 'automail', 'autolink', 'link', 'reference', 'short_reference', 'escape', 'strong_em', 'emphasis', 'emphasis2', - 'linebreak', 'strong'): + 'linebreak', 'strong', 'backtick'): del md.inlinePatterns[k] try: # linebreak2 was removed upstream in version 3.2.1, so @@ -1357,6 +1375,12 @@ def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> N md.preprocessors.add("custom_text_notifications", AlertWordsNotificationProcessor(md), "_end") + # Inline code block without whitespace stripping + md.inlinePatterns.add( + "backtick", + BacktickPattern(r'(?:(?<!\\)((?:\\{2})+)(?=`+)|(?<!\\)(`+)(.+?)(?<!`)\3(?!`))'), + "_begin") + # Custom bold syntax: **foo** but not __foo__ md.inlinePatterns.add('strong', markdown.inlinepatterns.SimpleTagPattern(r'(\*\*)([^\n]+?)\2', 'strong'),
diff --git a/zerver/fixtures/markdown_test_cases.json b/zerver/fixtures/markdown_test_cases.json --- a/zerver/fixtures/markdown_test_cases.json +++ b/zerver/fixtures/markdown_test_cases.json @@ -25,6 +25,12 @@ "expected_output": "<p>Hamlet once said</p>\n<div class=\"codehilite\"><pre><span></span>def func():\n x = 1\n\n y = 2\n\n z = 3\n</pre></div>\n\n\n<p>And all was good.</p>", "text_content": "Hamlet once said\ndef func():\n x = 1\n\n y = 2\n\n z = 3\n\n\n\nAnd all was good." }, + { + "name": "inline_code_spaces", + "input": "` outer ` ``` space ```", + "expected_output": "<p><code> outer </code> <code> space </code></p>", + "text_content": " outer space " + }, { "name": "codeblock_backticks", "input": "\n```\nfenced code\n```\n\n```inline code```\n", @@ -93,8 +99,8 @@ { "name": "dangerous_block", "input": "``` one ```\n\n``` two ```\n\n~~~~\nx = 1", - "expected_output": "<p><code>one</code></p>\n<p><code>two</code></p>\n<div class=\"codehilite\"><pre><span></span>x = 1\n</pre></div>", - "text_content": "one\ntwo\nx = 1\n" + "expected_output": "<p><code> one </code></p>\n<p><code> two </code></p>\n<div class=\"codehilite\"><pre><span></span>x = 1\n</pre></div>", + "text_content": " one \n two \nx = 1\n" }, { "name": "four_space_code_block",
bugdown: Don't rstrip inline code blocks.

We currently rstrip regular code blocks to make copy-pasting from the terminal easier (see #3998). However, inline code blocks should keep all their spaces.

To work on this issue, I would start by reading http://zulip.readthedocs.io/en/latest/markdown.html and trying to understand the code in #4185.
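A toy illustration of the intended behavior, outside of Bugdown itself: turn backtick spans into `<code>` without stripping the inner whitespace. The regex here is deliberately simplified and is not the pattern the real implementation would need (nested backticks, escaping, and HTML-escaping are ignored).

```python
import re

INLINE_CODE_RE = re.compile(r"`([^`]+)`")

def render_inline_code(text: str) -> str:
    # Keep the span exactly as typed -- no .strip() on the captured group.
    # (A real renderer would also HTML-escape the contents.)
    return INLINE_CODE_RE.sub(lambda m: "<code>%s</code>" % m.group(1), text)

print(render_inline_code("compare ` outer ` with `inner`"))
# compare <code> outer </code> with <code>inner</code>
```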
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! @zulipbot claim Congratulations, @TAAPArthur, you've successfully claimed your first issue! Contributing to Zulip will (hopefully) be a fun and rewarding experience, and I'm here to help get you off to a good start. First things first: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/index.html#code-docs), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! @zulipbot claim Congratulations, @IshanGupta10, you've successfully claimed your first issue! Contributing to Zulip will (hopefully) be a fun and rewarding experience, and I'm here to help get you off to a good start. First things first: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/index.html#code-docs), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Hello @IshanGupta10, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 10 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon!
2017-12-13T21:30:02
zulip/zulip
7,782
zulip__zulip-7782
[ "3915" ]
21008a49b9caf08c560256b6cba8bd4884e9225e
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -1,3 +1,3 @@ ZULIP_VERSION = "1.7.1+git" -PROVISION_VERSION = '14.1' +PROVISION_VERSION = '14.2' diff --git a/zerver/lib/mdiff.py b/zerver/lib/mdiff.py new file mode 100755 --- /dev/null +++ b/zerver/lib/mdiff.py @@ -0,0 +1,15 @@ +import os +import subprocess +import logging +import difflib + +def diff_strings(output: str, expected_output: str) -> str: + + mdiff_path = "frontend_tests/zjsunit/mdiff.js" + if not os.path.isfile(mdiff_path): # nocoverage + logging.error("Cannot find mdiff for markdown diff rendering") + return None + + command = ['node', mdiff_path, output, expected_output] + diff = subprocess.check_output(command).decode('utf-8') + return diff
diff --git a/frontend_tests/node_tests/markdown.js b/frontend_tests/node_tests/markdown.js --- a/frontend_tests/node_tests/markdown.js +++ b/frontend_tests/node_tests/markdown.js @@ -1,6 +1,7 @@ /*global Dict */ var path = zrequire('path', 'path'); var fs = zrequire('fs', 'fs'); + zrequire('hash_util'); zrequire('katex', 'node_modules/katex/dist/katex.min.js'); zrequire('marked', 'third/marked/lib/marked'); @@ -172,18 +173,19 @@ var bugdown_data = JSON.parse(fs.readFileSync(path.join(__dirname, '../../zerver (function test_marked_shared() { var tests = bugdown_data.regular_tests; + tests.forEach(function (test) { var message = {raw_content: test.input}; markdown.apply_markdown(message); var output = message.content; if (test.marked_expected_output) { - assert.notEqual(test.expected_output, output); - assert.equal(test.marked_expected_output, output); + global.bugdown_assert.notEqual(test.expected_output, output); + global.bugdown_assert.equal(test.marked_expected_output, output); } else if (test.backend_only_rendering) { assert.equal(markdown.contains_backend_only_syntax(test.input), true); } else { - assert.equal(test.expected_output, output); + global.bugdown_assert.equal(test.expected_output, output); } }); }()); diff --git a/frontend_tests/zjsunit/bugdown_assert.js b/frontend_tests/zjsunit/bugdown_assert.js new file mode 100644 --- /dev/null +++ b/frontend_tests/zjsunit/bugdown_assert.js @@ -0,0 +1,184 @@ +/** + * bugdown_assert.js + * + * Used to determine whether two Markdown HTML strings are semantically + * equivalent. Differs from the naive string-comparison approach in that + * differently typed but equivalent HTML fragments, such as '<p>&quot;</p>' + * and '<p>\"</p>', and '<span attr1="a" attr2="b"></span>' and + * '<span attr2="a" attr1="b"></span>', are still considered equal. + * + * The exported method equal() serves as a drop-in replacement for + * assert.equal(). Likewise, the exported method notEqual() replaces + * assert.notEqual(). + * + * There is a default _output_formatter used to create the + * AssertionError error message; this function can be overriden using + * the exported setFormatter() function below. + * + * The HTML passed to the _output_formatter is not the original HTML, but + * rather a serialized version of a DOM element generated from the original + * HTML. This makes it easier to spot relevant differences. 
+ */ + +const jsdom = require('jsdom'); +const _ = require('underscore'); + +const mdiff = require('./mdiff.js'); + +// Module-level global instance of MarkdownComparer, initialized when needed +let _markdownComparerInstance = null; + +class MarkdownComparer { + constructor(output_formatter) { + this._output_formatter = output_formatter || function (actual, expected) { + return ["Actual and expected output do not match.", + actual, + "!=", + expected, + ].join('\n'); + }; + this._document = jsdom.jsdom(); + } + + setFormatter(output_formatter) { + this._output_formatter = output_formatter || this._output_formatter; + } + + _htmlToElement(html, id) { + const template = this._document.createElement('template'); + const id_node = this._document.createAttribute('id'); + id_node.value = id; + template.setAttributeNode(id_node); + template.innerHTML = html; + return template; + } + + _haveEqualContents(node1, node2) { + if (node1.content.childNodes.length !== node2.content.childNodes.length) { + return false; + } + return _.reduce( + _.zip(node1.content.childNodes, node2.content.childNodes), + (prev, nodePair) => { return prev && nodePair[0].isEqualNode(nodePair[1]); }, + true + ); + } + + _reorderAttributes(node) { + // Sorts every attribute in every element by name. Ensures consistent diff HTML output + + const attributeList = []; + _.forEach(node.attributes, (attr) => { + attributeList.push(attr); + }); + + // If put in above forEach loop, causes issues (possible nodes.attribute invalidation?) + attributeList.forEach((attr) => {node.removeAttribute(attr.name);}); + + attributeList.sort((a, b) => { + const name_a = a.name; + const name_b = b.name; + if (name_a < name_b) { + return -1; + } else if (name_a > name_b) { + return 1; + } + return 0; + }); + + // Put them back in, in order + attributeList.forEach((attribute) => { + node.setAttribute(attribute.name, attribute.value); + }); + + if (node.hasChildNodes()) { + _.forEach(node.children, (childNode) => { + this._reorderAttributes(childNode); + }); + } + if (node.content && node.content.hasChildNodes()) { + _.forEach(node.content.children, (childNode) => { + this._reorderAttributes(childNode); + }); + } + return node; + } + + _compare(actual_markdown, expected_markdown) { + const ID_ACTUAL = "0"; + const ID_EXPECTED = "1"; + + const element_actual = this._htmlToElement(actual_markdown, ID_ACTUAL); + const element_expected = this._htmlToElement(expected_markdown, ID_EXPECTED); + + let are_equivalent = false; + let html = {}; + + are_equivalent = this._haveEqualContents(element_actual, element_expected); + if (!are_equivalent) { + html = { + actual : this._reorderAttributes(element_actual).innerHTML, + expected : this._reorderAttributes(element_expected).innerHTML, + }; + } + + element_actual.remove(); + element_expected.remove(); + + return { are_equivalent, html }; + } + + assertEqual(actual, expected, message) { + const comparison_results = this._compare(actual, expected); + + if (comparison_results.are_equivalent === false) { + throw new assert.AssertionError({ + message : message || this._output_formatter( + comparison_results.html.actual, + comparison_results.html.expected + ), + }); + } + } + + assertNotEqual(actual, expected, message) { + const comparison_results = this._compare(actual, expected); + + if (comparison_results.are_equivalent) { + throw new assert.AssertionError({ + message : message || [ + "actual and expected output produce semantially identical HTML", + actual, + "==", + expected, + ].join('\n'), + }); + } + } +} + 
+function returnComparer() { + if (!_markdownComparerInstance) { + _markdownComparerInstance = new MarkdownComparer((actual, expected) => { + return [ + "Actual and expected output do not match. Showing diff", + mdiff.diff_strings(actual, expected), + ].join('\n'); + }); + } + return _markdownComparerInstance; +} + +module.exports = { + equal(expected, actual, message) { + returnComparer().assertEqual(actual, expected, message); + }, + + notEqual(expected, actual, message) { + returnComparer().assertNotEqual(actual, expected, message); + }, + + setFormatter(output_formatter) { + returnComparer().setFormatter(output_formatter); + }, +}; diff --git a/frontend_tests/zjsunit/index.js b/frontend_tests/zjsunit/index.js --- a/frontend_tests/zjsunit/index.js +++ b/frontend_tests/zjsunit/index.js @@ -54,6 +54,9 @@ module.prototype.hot = { accept: noop, }; +// Set up bugdown comparison helper +global.bugdown_assert = require('./bugdown_assert.js'); + output.start_writing(); files.forEach(function (file) { diff --git a/frontend_tests/zjsunit/mdiff.js b/frontend_tests/zjsunit/mdiff.js new file mode 100644 --- /dev/null +++ b/frontend_tests/zjsunit/mdiff.js @@ -0,0 +1,146 @@ +/** + * mdiff.js + * + * Used to produce colorful and informative diffs for comparison of generated + * Markdown. Unlike the built-in diffs used in python or node.js assert libraries, + * is actually designed to be effective for long, single-line comparisons. + * + * Based on diffing library difflib, a js port of the python library. + * + * The sole exported function diff_strings(string_0, string_1) returns a pretty-printed + * unicode string containing their diff. + */ + +const _ = require('underscore'); +const difflib = require('difflib'); + +function apply_color(input_string, changes) { + let previous_index = 0; + let processed_string = input_string.slice(0,2); + input_string = input_string.slice(2); + + const formatter = { + delete : (string) => { return "\u001b[31m" + string + "\u001b[0m"; }, + insert : (string) => { return "\u001b[32m" + string + "\u001b[0m"; }, + replace : (string) => { return "\u001b[33m" + string + "\u001b[0m"; }, + }; + changes.forEach((change) => { + if (formatter.hasOwnProperty(change.tag)) { + processed_string += input_string.slice(previous_index, change.beginning_index); + processed_string += formatter[change.tag]( + input_string.slice(change.beginning_index, change.ending_index) + ); + previous_index = change.ending_index; + } + }); + + processed_string += input_string.slice(previous_index); + return processed_string; +} + +/** + * The library difflib produces diffs that look as follows: + * + * - <p>upgrade! yes</p> + * ? ^^ - + * + <p>downgrade yes.</p> + * ? ^^^^ + + * + * The purpose of this function is to facilitate converting these diffs into + * colored versions, where the question-mark lines are removed, replaced with + * directions to add appropriate color to the lines that they annotate. 
+ */ +function parse_questionmark_line(questionmark_line) { + let current_sequence = ""; // Either "^", "-", "+", or "" + let beginning_index = 0; + let index = 0; + + const changes_list = []; + const aliases = { + "^" : "replace", + "+" : "insert", + "-" : "delete", + }; + const add_change = () => { + if (current_sequence) { + changes_list.push({ + tag : aliases[current_sequence], + beginning_index, + ending_index : index, + }); + current_sequence = ""; + } + }; + + questionmark_line = questionmark_line.slice(2).trimRight("\n"); + + for (const character of questionmark_line) { + if (aliases.hasOwnProperty(character)) { + if (current_sequence !== character) { + add_change(); + current_sequence = character; + beginning_index = index; + } + } else { + add_change(); + } + index += 1; + } + + // In case we have a "change" involving the last character on a line + // e.g. a string such as "? ^^ -- ++++" + add_change(); + + return changes_list; +} + +function diff_strings(string_0, string_1) { + let output_lines = []; + let ndiff_output = ""; + let changes_list = []; + + ndiff_output = difflib.ndiff(string_0.split("\n"), string_1.split("\n")); + + ndiff_output.forEach((line) => { + if (line.startsWith("+")) { + output_lines.push(line); + } else if (line.startsWith("-")) { + output_lines.push(line); + } else if (line.startsWith("?")) { + changes_list = parse_questionmark_line(line); + output_lines[output_lines.length - 1] = apply_color( + output_lines[output_lines.length -1], changes_list); + } else { + output_lines.push(line); + } + }); + + const emphasize_codes = (string) => { + return "\u001b[34m" + string.slice(0,1) + "\u001b[0m" + string.slice(1); + }; + output_lines = _.map(output_lines, emphasize_codes); + + return output_lines.join("\n"); +} + +module.exports = { diff_strings }; + +// Simple CLI for this module +// Only run this code if called as a command-line utility +if (require.main === module) { + // First two args are just "node" and "mdiff.js" + const argv = require('minimist')(process.argv.slice(2)); + + if (_.has(argv, "help")) { + console.log(process.argv[0] + " " + process.argv[1] + + " [ --help ]" + + " string_0" + + " string_1" + + "\n" + + "Where string_0 and string_1 are the strings to be diffed" + ); + } + + const output = diff_strings(argv._[0], argv._[1]); + console.log(output); +} diff --git a/zerver/fixtures/markdown_test_cases.json b/zerver/fixtures/markdown_test_cases.json --- a/zerver/fixtures/markdown_test_cases.json +++ b/zerver/fixtures/markdown_test_cases.json @@ -19,6 +19,11 @@ "expected_output": "<p>Hamlet once said</p>\n<div class=\"codehilite\"><pre><span></span>def func():\n x = 1\n\n y = 2\n\n z = 3\n</pre></div>\n\n\n<p>And all was good.</p>", "text_content": "Hamlet once said\ndef func():\n x = 1\n\n y = 2\n\n z = 3\n\n\n\nAnd all was good." }, + { + "name": "test", + "input": "it's lunch time", + "expected_output": "<p>it's lunch time</p>" + }, { "name": "codeblock_trailing_whitespace", "input": "Hamlet once said\n~~~~\ndef func():\n x = 1\n\n y = 2\t\t\n\n z = 3 \n~~~~\nAnd all was good.", @@ -93,7 +98,6 @@ "name": "dangerous_block", "input": "xxxxxx xxxxx xxxxxxxx xxxx. x xxxx xxxxxxxxxx:\n\n```\"xxxx xxxx\\xxxxx\\xxxxxx\"```\n\nxxx xxxx xxxxx:```xx.xxxxxxx(x'^xxxx$', xx.xxxxxxxxx)```\n\nxxxxxxx'x xxxx xxxxxxxxxx ```'xxxx'```, xxxxx xxxxxxxxx xxxxx ^ xxx $ xxxxxx xxxxx xxxxxxxxxxxx xxx xxxx xx x xxxx xx xxxx xx xxx xxxxx xxxxxx?", "expected_output": "<p>xxxxxx xxxxx xxxxxxxx xxxx. 
x xxxx xxxxxxxxxx:</p>\n<p><code>\"xxxx xxxx\\xxxxx\\xxxxxx\"</code></p>\n<p>xxx xxxx xxxxx:<code>xx.xxxxxxx(x'^xxxx$', xx.xxxxxxxxx)</code></p>\n<p>xxxxxxx'x xxxx xxxxxxxxxx <code>'xxxx'</code>, xxxxx xxxxxxxxx xxxxx ^ xxx $ xxxxxx xxxxx xxxxxxxxxxxx xxx xxxx xx x xxxx xx xxxx xx xxx xxxxx xxxxxx?</p>", - "marked_expected_output": "<p>xxxxxx xxxxx xxxxxxxx xxxx. x xxxx xxxxxxxxxx:</p>\n<p><code>&quot;xxxx xxxx\\xxxxx\\xxxxxx&quot;</code></p>\n<p>xxx xxxx xxxxx:<code>xx.xxxxxxx(x&#39;^xxxx$&#39;, xx.xxxxxxxxx)</code></p>\n<p>xxxxxxx&#39;x xxxx xxxxxxxxxx <code>&#39;xxxx&#39;</code>, xxxxx xxxxxxxxx xxxxx ^ xxx $ xxxxxx xxxxx xxxxxxxxxxxx xxx xxxx xx x xxxx xx xxxx xx xxx xxxxx xxxxxx?</p>", "text_content": "xxxxxx xxxxx xxxxxxxx xxxx. x xxxx xxxxxxxxxx:\n\"xxxx xxxx\\xxxxx\\xxxxxx\"\nxxx xxxx xxxxx:xx.xxxxxxx(x'^xxxx$', xx.xxxxxxxxx)\nxxxxxxx'x xxxx xxxxxxxxxx 'xxxx', xxxxx xxxxxxxxx xxxxx ^ xxx $ xxxxxx xxxxx xxxxxxxxxxxx xxx xxxx xx x xxxx xx xxxx xx xxx xxxxx xxxxxx?" }, { @@ -447,8 +451,7 @@ { "name": "safe_html_messed_up_complexly_nested_script_tags", "input": "<scr<script></script>ipt type=\"text/javascript\">alert(\"foo\");</<script></script>script<del></del>>", - "expected_output": "<p>&lt;scr&lt;script&gt;&lt;/script&gt;ipt type=\"text/javascript\"&gt;alert(\"foo\");&lt;/&lt;script&gt;&lt;/script&gt;script&lt;del&gt;&lt;/del&gt;&gt;</p>", - "marked_expected_output": "<p>&lt;scr&lt;script&gt;&lt;/script&gt;ipt type=&quot;text/javascript&quot;&gt;alert(&quot;foo&quot;);&lt;/&lt;script&gt;&lt;/script&gt;script&lt;del&gt;&lt;/del&gt;&gt;</p>" + "expected_output": "<p>&lt;scr&lt;script&gt;&lt;/script&gt;ipt type=\"text/javascript\"&gt;alert(\"foo\");&lt;/&lt;script&gt;&lt;/script&gt;script&lt;del&gt;&lt;/del&gt;&gt;</p>" }, { "name": "safe_html_unclosed_tag", @@ -527,14 +530,12 @@ { "name": "tex_inline", "input": "$$1 \\oplus 0 = 1$$", - "expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mn>1</mn><mo>⊕</mo><mn>0</mn><mo>=</mo><mn>1</mn></mrow><annotation encoding=\"application/x-tex\">1 \\oplus 0 = 1</annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.64444em;\"></span><span class=\"strut bottom\" style=\"height:0.72777em;vertical-align:-0.08333em;\"></span><span class=\"base\"><span class=\"mord mathrm\">1</span><span class=\"mbin\">⊕</span><span class=\"mord mathrm\">0</span><span class=\"mrel\">=</span><span class=\"mord mathrm\">1</span></span></span></span></p>", - "marked_expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mn>1</mn><mo>⊕</mo><mn>0</mn><mo>=</mo><mn>1</mn></mrow><annotation encoding=\"application/x-tex\">1 \\oplus 0 = 1</annotation></semantics></math></span><span class=\"katex-html\" aria-hidden=\"true\"><span class=\"strut\" style=\"height:0.64444em;\"></span><span class=\"strut bottom\" style=\"height:0.72777em;vertical-align:-0.08333em;\"></span><span class=\"base\"><span class=\"mord mathrm\">1</span><span class=\"mbin\">⊕</span><span class=\"mord mathrm\">0</span><span class=\"mrel\">=</span><span class=\"mord mathrm\">1</span></span></span></span></p>" + "expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mn>1</mn><mo>⊕</mo><mn>0</mn><mo>=</mo><mn>1</mn></mrow><annotation encoding=\"application/x-tex\">1 \\oplus 0 = 1</annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" 
style=\"height:0.64444em;\"></span><span class=\"strut bottom\" style=\"height:0.72777em;vertical-align:-0.08333em;\"></span><span class=\"base\"><span class=\"mord mathrm\">1</span><span class=\"mbin\">⊕</span><span class=\"mord mathrm\">0</span><span class=\"mrel\">=</span><span class=\"mord mathrm\">1</span></span></span></span></p>" }, { "name": "tex_complex", "input": "$$\\Phi_E = \\oint E \\cdot dA$$", - "expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><msub><mi mathvariant=\"normal\">Φ</mi><mi>E</mi></msub><mo>=</mo><mo>∮</mo><mi>E</mi><mo>⋅</mo><mi>d</mi><mi>A</mi></mrow><annotation encoding=\"application/x-tex\">\\Phi_E = \\oint E \\cdot dA</annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.805em;\"></span><span class=\"strut bottom\" style=\"height:1.11112em;vertical-align:-0.30612em;\"></span><span class=\"base\"><span class=\"mord\"><span class=\"mord mathrm\">Φ</span><span class=\"msupsub\"><span class=\"vlist-t vlist-t2\"><span class=\"vlist-r\"><span class=\"vlist\" style=\"height:0.32833099999999993em;\"><span style=\"top:-2.5500000000000003em;margin-left:0em;margin-right:0.05em;\"><span class=\"pstrut\" style=\"height:2.7em;\"></span><span class=\"sizing reset-size6 size3 mtight\"><span class=\"mord mathit mtight\" style=\"margin-right:0.05764em;\">E</span></span></span></span><span class=\"vlist-s\">​</span></span><span class=\"vlist-r\"><span class=\"vlist\" style=\"height:0.15em;\"></span></span></span></span></span><span class=\"mrel\">=</span><span class=\"mop op-symbol small-op\" style=\"margin-right:0.19445em;position:relative;top:-0.0005599999999999772em;\">∮</span><span class=\"mord mathit\" style=\"margin-right:0.05764em;\">E</span><span class=\"mbin\">⋅</span><span class=\"mord mathit\">d</span><span class=\"mord mathit\">A</span></span></span></span></p>", - "marked_expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><msub><mi mathvariant=\"normal\">Φ</mi><mi>E</mi></msub><mo>=</mo><mo>∮</mo><mi>E</mi><mo>⋅</mo><mi>d</mi><mi>A</mi></mrow><annotation encoding=\"application/x-tex\">\\Phi_E = \\oint E \\cdot dA</annotation></semantics></math></span><span class=\"katex-html\" aria-hidden=\"true\"><span class=\"strut\" style=\"height:0.805em;\"></span><span class=\"strut bottom\" style=\"height:1.11112em;vertical-align:-0.30612em;\"></span><span class=\"base\"><span class=\"mord\"><span class=\"mord mathrm\">Φ</span><span class=\"msupsub\"><span class=\"vlist-t vlist-t2\"><span class=\"vlist-r\"><span class=\"vlist\" style=\"height:0.32833099999999993em;\"><span style=\"top:-2.5500000000000003em;margin-left:0em;margin-right:0.05em;\"><span class=\"pstrut\" style=\"height:2.7em;\"></span><span class=\"sizing reset-size6 size3 mtight\"><span class=\"mord mathit mtight\" style=\"margin-right:0.05764em;\">E</span></span></span></span><span class=\"vlist-s\">​</span></span><span class=\"vlist-r\"><span class=\"vlist\" style=\"height:0.15em;\"></span></span></span></span></span><span class=\"mrel\">=</span><span class=\"mop op-symbol small-op\" style=\"margin-right:0.19445em;position:relative;top:-0.0005599999999999772em;\">∮</span><span class=\"mord mathit\" style=\"margin-right:0.05764em;\">E</span><span class=\"mbin\">⋅</span><span class=\"mord mathit\">d</span><span class=\"mord mathit\">A</span></span></span></span></p>" + "expected_output": "<p><span class=\"katex\"><span 
class=\"katex-mathml\"><math><semantics><mrow><msub><mi mathvariant=\"normal\">Φ</mi><mi>E</mi></msub><mo>=</mo><mo>∮</mo><mi>E</mi><mo>⋅</mo><mi>d</mi><mi>A</mi></mrow><annotation encoding=\"application/x-tex\">\\Phi_E = \\oint E \\cdot dA</annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.805em;\"></span><span class=\"strut bottom\" style=\"height:1.11112em;vertical-align:-0.30612em;\"></span><span class=\"base\"><span class=\"mord\"><span class=\"mord mathrm\">Φ</span><span class=\"msupsub\"><span class=\"vlist-t vlist-t2\"><span class=\"vlist-r\"><span class=\"vlist\" style=\"height:0.32833099999999993em;\"><span style=\"top:-2.5500000000000003em;margin-left:0em;margin-right:0.05em;\"><span class=\"pstrut\" style=\"height:2.7em;\"></span><span class=\"sizing reset-size6 size3 mtight\"><span class=\"mord mathit mtight\" style=\"margin-right:0.05764em;\">E</span></span></span></span><span class=\"vlist-s\">​</span></span><span class=\"vlist-r\"><span class=\"vlist\" style=\"height:0.15em;\"></span></span></span></span></span><span class=\"mrel\">=</span><span class=\"mop op-symbol small-op\" style=\"margin-right:0.19445em;position:relative;top:-0.0005599999999999772em;\">∮</span><span class=\"mord mathit\" style=\"margin-right:0.05764em;\">E</span><span class=\"mbin\">⋅</span><span class=\"mord mathit\">d</span><span class=\"mord mathit\">A</span></span></span></span></p>" }, { "name": "tex_escaped", @@ -562,14 +563,12 @@ { "name": "tex_money", "input": "Tickets are $5 to $20 for youth, $10-$30 for adults, so we are hoping to bring in $500 from the event ($$x \\approx 500\\$$$)", - "expected_output": "<p>Tickets are $5 to $20 for youth, $10-$30 for adults, so we are hoping to bring in $500 from the event (<span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mi>x</mi><mo>≈</mo><mn>5</mn><mn>0</mn><mn>0</mn><mi mathvariant=\"normal\">$</mi></mrow><annotation encoding=\"application/x-tex\">x \\approx 500\\$</annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.75em;\"></span><span class=\"strut bottom\" style=\"height:0.80556em;vertical-align:-0.05556em;\"></span><span class=\"base\"><span class=\"mord mathit\">x</span><span class=\"mrel\">≈</span><span class=\"mord mathrm\">5</span><span class=\"mord mathrm\">0</span><span class=\"mord mathrm\">0</span><span class=\"mord mathrm\">$</span></span></span></span>)</p>", - "marked_expected_output": "<p>Tickets are $5 to $20 for youth, $10-$30 for adults, so we are hoping to bring in $500 from the event (<span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mi>x</mi><mo>≈</mo><mn>5</mn><mn>0</mn><mn>0</mn><mi mathvariant=\"normal\">$</mi></mrow><annotation encoding=\"application/x-tex\">x \\approx 500\\$</annotation></semantics></math></span><span class=\"katex-html\" aria-hidden=\"true\"><span class=\"strut\" style=\"height:0.75em;\"></span><span class=\"strut bottom\" style=\"height:0.80556em;vertical-align:-0.05556em;\"></span><span class=\"base\"><span class=\"mord mathit\">x</span><span class=\"mrel\">≈</span><span class=\"mord mathrm\">5</span><span class=\"mord mathrm\">0</span><span class=\"mord mathrm\">0</span><span class=\"mord mathrm\">$</span></span></span></span>)</p>" + "expected_output": "<p>Tickets are $5 to $20 for youth, $10-$30 for adults, so we are hoping to bring in $500 from the event (<span class=\"katex\"><span 
class=\"katex-mathml\"><math><semantics><mrow><mi>x</mi><mo>≈</mo><mn>5</mn><mn>0</mn><mn>0</mn><mi mathvariant=\"normal\">$</mi></mrow><annotation encoding=\"application/x-tex\">x \\approx 500\\$</annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.75em;\"></span><span class=\"strut bottom\" style=\"height:0.80556em;vertical-align:-0.05556em;\"></span><span class=\"base\"><span class=\"mord mathit\">x</span><span class=\"mrel\">≈</span><span class=\"mord mathrm\">5</span><span class=\"mord mathrm\">0</span><span class=\"mord mathrm\">0</span><span class=\"mord mathrm\">$</span></span></span></span>)</p>" }, { "name": "tex_inline_permissive_spacing", "input": "$$ x = 7 $$", - "expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mi>x</mi><mo>=</mo><mn>7</mn></mrow><annotation encoding=\"application/x-tex\"> x = 7 </annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.64444em;\"></span><span class=\"strut bottom\" style=\"height:0.64444em;vertical-align:0em;\"></span><span class=\"base\"><span class=\"mord mathit\">x</span><span class=\"mrel\">=</span><span class=\"mord mathrm\">7</span></span></span></span></p>", - "marked_expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mi>x</mi><mo>=</mo><mn>7</mn></mrow><annotation encoding=\"application/x-tex\"> x = 7 </annotation></semantics></math></span><span class=\"katex-html\" aria-hidden=\"true\"><span class=\"strut\" style=\"height:0.64444em;\"></span><span class=\"strut bottom\" style=\"height:0.64444em;vertical-align:0em;\"></span><span class=\"base\"><span class=\"mord mathit\">x</span><span class=\"mrel\">=</span><span class=\"mord mathrm\">7</span></span></span></span></p>" + "expected_output": "<p><span class=\"katex\"><span class=\"katex-mathml\"><math><semantics><mrow><mi>x</mi><mo>=</mo><mn>7</mn></mrow><annotation encoding=\"application/x-tex\"> x = 7 </annotation></semantics></math></span><span aria-hidden=\"true\" class=\"katex-html\"><span class=\"strut\" style=\"height:0.64444em;\"></span><span class=\"strut bottom\" style=\"height:0.64444em;vertical-align:0em;\"></span><span class=\"base\"><span class=\"mord mathit\">x</span><span class=\"mrel\">=</span><span class=\"mord mathrm\">7</span></span></span></span></p>" }, { "name": "tex_inline_prohibited_newline", diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -22,6 +22,7 @@ ZulipTestCase, ) from zerver.lib.test_runner import slow +from zerver.lib import mdiff from zerver.models import ( realm_in_local_realm_filters_cache, flush_per_request_caches, @@ -165,6 +166,12 @@ def bugdown_convert(text: Text) -> Text: return bugdown.convert(text, message_realm=get_realm('zulip')) class BugdownMiscTest(ZulipTestCase): + def test_diffs_work_as_expected(self) -> None: + str1 = "<p>The quick brown fox jumps over the lazy dog. Animal stories are fun, yeah</p>" + str2 = "<p>The fast fox jumps over the lazy dogs and cats. Animal stories are fun</p>" + expected_diff = "\u001b[34m-\u001b[0m <p>The \u001b[33mquick brown\u001b[0m fox jumps over the lazy dog. Animal stories are fun\u001b[31m, yeah\u001b[0m</p>\n\u001b[34m+\u001b[0m <p>The \u001b[33mfast\u001b[0m fox jumps over the lazy dog\u001b[32ms and cats\u001b[0m. 
Animal stories are fun</p>\n" + self.assertEqual(mdiff.diff_strings(str1, str2), expected_diff) + def test_get_full_name_info(self) -> None: realm = get_realm('zulip') @@ -207,6 +214,14 @@ def test_mention_data(self) -> None: self.assertEqual(user['email'], hamlet.email) class BugdownTest(ZulipTestCase): + def assertEqual(self, first: Any, second: Any, msg: Text = "") -> None: + if isinstance(first, Text) and isinstance(second, Text): + if first != second: + raise AssertionError("Actual and expected outputs do not match; showing diff.\n" + + mdiff.diff_strings(first, second) + msg) + else: + super().assertEqual(first, second) + def load_bugdown_tests(self) -> Tuple[Dict[Text, Any], List[List[Text]]]: test_fixtures = {} data_file = open(os.path.join(os.path.dirname(__file__), '../fixtures/markdown_test_cases.json'), 'r')
Show a pretty-printed diff of the full strings in bugdown tests' errors (Make sure you've read zulip.readthedocs.io/en/latest/markdown.html to understand this issue). # Description Right now, when a test in the strings checked in `zerver/fixtures/markdown_test_cases.json` doesn't match what it should, `test-js-with-node` (aka the frontend tests) raises an `AssertionError`, with a small diff of the conflicting strings: ``` assert.js:85 throw new assert.AssertionError({ ^ AssertionError: 'foo' == 'bar' at /srv/zulip/frontend_tests/node_tests/echo.js:156:14 ... ``` However, when the strings are longer it's hard to identify the difference between them because they aren't readably organized. The same issue exists with the backend markdown tests as well, with the added problem that if they're long enough, they get truncated (which can potentially skip the difference between both strings). ``` AssertionError: '<p><span class="katex-display"><span class="katex"><span class ="katex-mathml"><math><semantics><mrow><msub><mi mathvariant="nor == '<p><span class="katex-display"><span class="katex"><span class="katex-mathml"><math> <semantics><mrow><msub><mi mathvariant="nor ``` # Additional information The truncation problem in the backend Bugdown probably depends on the `maxDiff` setting; we may be able to fix it in general by updating the Bugdown test classes. For the pretty-printing HTML diff, it's not clear what is required, but it might be worth looking at how Markdown processors write their own unit tests (for `marked.js` and `python-markdown` to see if they have a good solution for this that we can just import)
Hello @zulip/server-testing members, this issue was labeled with the **area: testing-infrastructure** label, so you may want to check it out! Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! Edited the description a bit :) @PentaHiggs since you've been working on markdown, this might be an issue worth working on. I imagine done well, it'd make life a lot easier for reading things like the `katex` output :). I'd look for a third-party HTML diff printer (actually, I think we're using `lxml` for this in `zerver/lib/html_diff.py` -- maybe worth looking at that?). Also CC @HarshitOnGitHub who wrote that `html_diff.py` code. Thanks for pointing it out to me! I've been thinking of doing something about that; the `katex` output diffs are more or less unreadable sometimes. @zulipbot claim Hello @PentaHiggs, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon!
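Returning to the readability question above: on the backend side, both the truncation and the unreadable-diff problems can be attacked with the standard library alone. The sketch below is plain unittest/difflib usage, not the mdiff helper this change ended up adding.

```python
import difflib
import unittest

class BugdownExampleTest(unittest.TestCase):
    # unittest truncates long assertEqual diffs unless maxDiff is disabled.
    maxDiff = None

    def assert_html_equal(self, actual: str, expected: str) -> None:
        if actual != expected:
            diff = "\n".join(
                difflib.ndiff(actual.splitlines(), expected.splitlines())
            )
            self.fail("Rendered HTML does not match:\n" + diff)

    def test_demo(self) -> None:
        self.assert_html_equal("<p>foo</p>", "<p>foo</p>")

if __name__ == "__main__":
    unittest.main()
```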
2017-12-15T04:37:58
zulip/zulip
7,818
zulip__zulip-7818
[ "5672" ]
90c71f3ca675b6da0d577db44bc23a99b6657dd6
diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py --- a/zerver/lib/integrations.py +++ b/zerver/lib/integrations.py @@ -284,6 +284,7 @@ def __init__(self, name: str, *args: Any, **kwargs: Any) -> None: display_name='Desk.com', stream_name='desk' ), + WebhookIntegration('dropbox', ['productivity'], display_name='Dropbox'), WebhookIntegration('freshdesk', ['customer-support']), GithubIntegration( 'github', diff --git a/zerver/webhooks/dropbox/__init__.py b/zerver/webhooks/dropbox/__init__.py new file mode 100644 diff --git a/zerver/webhooks/dropbox/view.py b/zerver/webhooks/dropbox/view.py new file mode 100644 --- /dev/null +++ b/zerver/webhooks/dropbox/view.py @@ -0,0 +1,18 @@ +from typing import Text +from django.http import HttpRequest, HttpResponse +from zerver.lib.actions import check_send_stream_message +from zerver.lib.response import json_success +from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view +from zerver.models import UserProfile + +@api_key_only_webhook_view('Dropbox') +@has_request_variables +def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile, + stream: Text=REQ(default='test'), + topic: Text=REQ(default='Dropbox')) -> HttpResponse: + if request.method == 'GET': + return HttpResponse(request.GET['challenge']) + elif request.method == 'POST': + check_send_stream_message(user_profile, request.client, + stream, topic, "File has been updated on Dropbox!") + return json_success()
diff --git a/zerver/webhooks/dropbox/tests.py b/zerver/webhooks/dropbox/tests.py new file mode 100644 --- /dev/null +++ b/zerver/webhooks/dropbox/tests.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +from typing import Text + +from zerver.lib.test_classes import WebhookTestCase + +class DropboxHookTests(WebhookTestCase): + STREAM_NAME = 'test' + URL_TEMPLATE = "/api/v1/external/dropbox?&api_key={api_key}" + FIXTURE_DIR_NAME = 'dropbox' + + def test_file_updated(self) -> None: + expected_subject = u"Dropbox" + expected_message = u"File has been updated on Dropbox!" + + self.send_and_test_stream_message('file_updated', expected_subject, expected_message, + content_type="application/x-www-form-urlencoded") + + def get_body(self, fixture_name: Text) -> Text: + return self.fixture_data("dropbox", fixture_name, file_type="json") + + def test_verification_request(self) -> None: + self.subscribe(self.test_user, self.STREAM_NAME) + get_params = {'stream_name': self.STREAM_NAME, + 'challenge': '9B2SVL4orbt5DxLMqJHI6pOTipTqingt2YFMIO0g06E', + 'api_key': self.test_user.api_key} + result = self.client_get(self.url, get_params) + + self.assert_in_response('9B2SVL4orbt5DxLMqJHI6pOTipTqingt2YFMIO0g06E', result)
integrations: Add Dropbox integration. It seems fitting for us to support a Dropbox integration! The Dropbox API supports webhooks to notify when a file has been modified, which could be potentially useful for organizations with shared files. If anyone has any other suggestions for what such an integration could do, let me know.
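One concrete piece of the Dropbox webhook flow is the verification handshake: Dropbox sends a GET request with a `challenge` query parameter and expects that value echoed back, which is exactly what the GET branch of the view in the patch above does. A minimal standalone sketch of that handshake (standard library only, not the Zulip view itself; the port is arbitrary):

```python
# Sketch of Dropbox's webhook verification handshake: echo back the
# ?challenge=... value. Not Zulip code; http.server is used for brevity.
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse, parse_qs

class DropboxHookHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        params = parse_qs(urlparse(self.path).query)
        challenge = params.get("challenge", [""])[0]
        self.send_response(200)
        self.send_header("Content-Type", "text/plain")
        self.end_headers()
        self.wfile.write(challenge.encode())

if __name__ == "__main__":
    HTTPServer(("", 8080), DropboxHookHandler).serve_forever()
```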
@zulipbot claim Hello @zulip/server-integrations members, this issue was labeled with the **area: integrations** label, so you may want to check it out! Hello @cory2067, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 10 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot claim
2017-12-18T01:07:07
zulip/zulip
7,842
zulip__zulip-7842
[ "7835" ]
cc08becb2c8a9cb9b9ddd807764c7ec871360d18
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -1,3 +1,3 @@ ZULIP_VERSION = "1.7.1+git" -PROVISION_VERSION = '14.2' +PROVISION_VERSION = '14.3' diff --git a/zerver/lib/email_mirror.py b/zerver/lib/email_mirror.py --- a/zerver/lib/email_mirror.py +++ b/zerver/lib/email_mirror.py @@ -1,9 +1,9 @@ -from typing import Any, Dict, List, Optional, Text +from typing import Any, Dict, List, Optional, Text, Union import logging import re -from email.header import decode_header +from email.header import decode_header, Header import email.message as message from django.conf import settings @@ -284,7 +284,7 @@ def find_emailgateway_recipient(message: message.Message) -> Text: # it is more accurate, so try to find the most-accurate # recipient list in descending priority order recipient_headers = ["X-Gm-Original-To", "Delivered-To", "To"] - recipients = [] # type: List[Text] + recipients = [] # type: List[Union[Text, Header]] for recipient_header in recipient_headers: r = message.get_all(recipient_header, None) if r: @@ -293,7 +293,7 @@ def find_emailgateway_recipient(message: message.Message) -> Text: pattern_parts = [re.escape(part) for part in settings.EMAIL_GATEWAY_PATTERN.split('%s')] match_email_re = re.compile(".*?".join(pattern_parts)) - for recipient_email in recipients: + for recipient_email in [str(recipient) for recipient in recipients]: if match_email_re.match(recipient_email): return recipient_email
Upgrade mypy to 0.560 There's a new mypy release! http://mypy-lang.blogspot.com/2017/12/mypy-0560-released.html It'd be cool to upgrade -- I don't think the headline feature matters to us, but this sounds pretty neat: > Speed improvements in incremental mode and there are several improvements in error messages, and bugfixes, that will be nice improvements to the experience. @neiljp , would you be up for doing this upgrade?
Hello @zulip/server-tooling members, this issue was labeled with the **area: tooling** label, so you may want to check it out! <!-- areaLabelNotification --> @gnprice I'd like to, but on my personal system the tools are rather demanding (wrt root) and my disk is rather small; this seems like the appropriate time to re-request a droplet? I think mine was destroyed at the end of PyCon. (I appreciate the desire to operate in a consistent dev environment to run the tools, but it'd be useful to not require this) @neiljp Yeah, you'd definitely be quite welcome to a new droplet! Just make a request in chat per the instructions at https://zulip.readthedocs.io/en/latest/development/request-remote.html , and someone will set you up. For the broader question of how the dev environment is designed, I think it'd be tough to have a full dev environment (with database, rabbitmq, etc.) without installing system software. And I think it's pretty uncommon for people to be doing development on Zulip and not need a dev environment where the app can actually run. So it probably makes sense for us to keep handling those situations with our current strategy of liberally providing remote dev VMs. @zulipbot claim
2017-12-19T23:56:01
zulip/zulip
7,864
zulip__zulip-7864
[ "7596" ]
3d4ee3b86228c4a4e26fea8d1ca2fbbb5241aade
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1389,7 +1389,7 @@ def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> N # Custom strikethrough syntax: ~~foo~~ md.inlinePatterns.add('del', markdown.inlinepatterns.SimpleTagPattern( - r'(?<!~)(\~\~)([^~{0}\n]+?)\2(?!~)', 'del'), '>strong') + r'(?<!~)(\~\~)([^~\n]+?)(\~\~)(?!~)', 'del'), '>strong') # Text inside ** must start and end with a word character # it need for things like "const char *x = (char *)y"
diff --git a/zerver/fixtures/markdown_test_cases.json b/zerver/fixtures/markdown_test_cases.json --- a/zerver/fixtures/markdown_test_cases.json +++ b/zerver/fixtures/markdown_test_cases.json @@ -188,6 +188,12 @@ "expected_output": "<p>I <del> like software </del> love hardware</p>", "text_content": "I like software love hardware" }, + { + "name": "strikthrough_link", + "input": "~~test http://xx.xx link~~", + "expected_output": "<p><del>test <a href=\"http://xx.xx\" target=\"_blank\" title=\"http://xx.xx\">http://xx.xx</a> link</del></p>", + "text_content": "test http://xx.xx link" + }, { "name": "underscore_disabled", "input": "_foo_",
Strikethrough doesn't work with auto-links @tommyip pointed out that if you write something like `~~sdfsddsaf #7515 fffsdafdsa~~`, with an auto-linkified term inside strikethrough markup `~~`, the strikethrough doesn't happen: ![](https://chat.zulip.org/user_uploads/2/9b/jCZxws2flyppXlQWKDESvRf-/pasted_image.png)
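Outside of the full rendering pipeline, the difference between the two `del` patterns in the patch above is easy to demonstrate: the old character class `[^~{0}\n]` also excluded `{`, `0`, and `}` (apparently a leftover from a format-string template), so a strikethrough span whose partially-processed content contained any of those characters — plausibly including the numeric placeholder tokens Python-Markdown substitutes for already-handled links — could never match. A regex-only sketch:

```python
# Compare the old and new 'del' patterns from the patch above.
import re

old = re.compile(r'(?<!~)(\~\~)([^~{0}\n]+?)\2(?!~)')
new = re.compile(r'(?<!~)(\~\~)([^~\n]+?)(\~\~)(?!~)')

# The old class also rejects '{', '0' and '}', so any span containing
# them between the tildes fails to match.
span = "~~see placeholder 0 inside~~"
print(bool(old.search(span)))  # False
print(bool(new.search(span)))  # True
```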
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! <!-- areaLabelNotification --> @zulipbot claim. Hello @akashnimare, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 10 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 4 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! <!-- inactiveWarning -->
2017-12-22T12:00:25
zulip/zulip
7,918
zulip__zulip-7918
[ "7537" ]
a49fd446126b0d45d93ea3cf4954eb05b996c78c
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1,6 +1,7 @@ # Zulip's main markdown implementation. See docs/subsystems/markdown.md for # detailed documentation on our markdown syntax. -from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Text, Tuple, TypeVar, Union +from typing import (Any, Callable, Dict, Iterable, List, NamedTuple, + Optional, Set, Text, Tuple, TypeVar, Union) from mypy_extensions import TypedDict from typing.re import Match @@ -143,6 +144,47 @@ def walk_tree(root: Element, return results +ElementFamily = NamedTuple('ElementFamily', [ + ('grandparent', Optional[Element]), + ('parent', Element), + ('child', Element) +]) + +ResultWithFamily = NamedTuple('ResultWithFamily', [ + ('family', ElementFamily), + ('result', Any) +]) + +def walk_tree_with_family(root: Element, + processor: Callable[[Element], Optional[_T]] + ) -> List[ResultWithFamily]: + results = [] + + queue = deque([{'parent': None, 'value': root}]) + while queue: + currElementPair = queue.popleft() + for child in currElementPair['value'].getchildren(): + if child.getchildren(): + queue.append({'parent': currElementPair, 'value': child}) # type: ignore # Lack of Deque support in typing module for Python 3.4.3 + result = processor(child) + if result is not None: + if currElementPair['parent']: + grandparent = currElementPair['parent']['value'] + else: + grandparent = None + family = ElementFamily( + grandparent=grandparent, + parent=currElementPair['value'], + child=child + ) + + results.append(ResultWithFamily( + family=family, + result=result + )) + + return results + # height is not actually used def add_a( root: Element, @@ -151,13 +193,19 @@ def add_a( title: Optional[Text]=None, desc: Optional[Text]=None, class_attr: Text="message_inline_image", - data_id: Optional[Text]=None + data_id: Optional[Text]=None, + insertion_index: Optional[int]=None ) -> None: title = title if title is not None else url_filename(link) title = title if title else "" desc = desc if desc is not None else "" - div = markdown.util.etree.SubElement(root, "div") + if insertion_index is not None: + div = markdown.util.etree.Element("div") + root.insert(insertion_index, div) + else: + div = markdown.util.etree.SubElement(root, "div") + div.set("class", class_attr) a = markdown.util.etree.SubElement(div, "a") a.set("href", link) @@ -175,7 +223,6 @@ def add_a( desc_div = markdown.util.etree.SubElement(summary_div, "desc") desc_div.set("class", "message_inline_image_desc") - def add_embed(root: Element, link: Text, extracted_data: Dict[Text, Any]) -> None: container = markdown.util.etree.SubElement(root, "div") container.set("class", "message_embed") @@ -665,27 +712,79 @@ def get_url_data(self, e: Element) -> Optional[Tuple[Text, Text]]: return (e.get("href"), e.get("href")) return None - def is_only_element(self, root: Element, url: str) -> bool: - # Check if the url is the only content of the message. 
+ def handle_image_inlining(self, root: Element, found_url: ResultWithFamily) -> None: + grandparent = found_url.family.grandparent + parent = found_url.family.parent + ahref_element = found_url.family.child + (url, text) = found_url.result + actual_url = self.get_actual_image_url(url) - if not len(root) == 1: - return False + # url != text usually implies a named link, which we opt not to remove + url_eq_text = (url == text) - # Generate a <p><a>url</a></p> element - expected_elem = markdown.util.etree.Element('p') - expected_elem.append(url_to_a(url)) - expected_html = markdown.util.etree.tostring(expected_elem) + if parent.tag == 'li': + add_a(parent, self.get_actual_image_url(url), url, title=text) + if not parent.text and not ahref_element.tail and url_eq_text: + parent.remove(ahref_element) - actual_html = markdown.util.etree.tostring(root[0]) + elif parent.tag == 'p': + parent_index = None + for index, uncle in enumerate(grandparent.getchildren()): + if uncle is parent: + parent_index = index + break - if not actual_html.strip() == expected_html.strip(): - return False + if parent_index is not None: + ins_index = self.find_proper_insertion_index(grandparent, parent, parent_index) + add_a(grandparent, actual_url, url, title=text, insertion_index=ins_index) - return True + else: + # We're not inserting after parent, since parent not found. + # Append to end of list of grandparent's children as normal + add_a(grandparent, actual_url, url, title=text) + + # If link is alone in a paragraph, delete paragraph containing it + if (len(parent.getchildren()) == 1 and + (not parent.text or parent.text == "\n") and + not ahref_element.tail and + url_eq_text): + grandparent.remove(parent) + + else: + # If none of the above criteria match, fall back to old behavior + add_a(root, actual_url, url, title=text) + + def find_proper_insertion_index(self, grandparent: Element, parent: Element, + parent_index_in_grandparent: int) -> int: + # If there are several inline images from same paragraph, ensure that + # they are in correct (and not opposite) order by inserting after last + # inline image from paragraph 'parent' + + uncles = grandparent.getchildren() + parent_links = [ele.attrib['href'] for ele in parent.iter(tag="a")] + insertion_index = parent_index_in_grandparent + + while True: + insertion_index += 1 + if insertion_index >= len(uncles): + return insertion_index + + uncle = uncles[insertion_index] + inline_image_classes = ['message_inline_image', 'message_inline_ref'] + if ( + uncle.tag != 'div' or + 'class' not in uncle.keys() or + uncle.attrib['class'] not in inline_image_classes + ): + return insertion_index + + uncle_link = list(uncle.iter(tag="a"))[0].attrib['href'] + if uncle_link not in parent_links: + return insertion_index def run(self, root: Element) -> None: # Get all URLs from the blob - found_urls = walk_tree(root, self.get_url_data) + found_urls = walk_tree_with_family(root, self.get_url_data) # If there are more than 5 URLs in the message, don't do inline previews if len(found_urls) == 0 or len(found_urls) > 5: @@ -693,7 +792,8 @@ def run(self, root: Element) -> None: rendered_tweet_count = 0 - for (url, text) in found_urls: + for found_url in found_urls: + (url, text) = found_url.result dropbox_image = self.dropbox_image(url) if dropbox_image is not None: @@ -708,10 +808,7 @@ def run(self, root: Element) -> None: class_attr=class_attr) continue if self.is_image(url): - if len(found_urls) == 1 and self.is_only_element(root, url): - # If the complete message is the image link, 
remove the link - root.remove(root[0]) - add_a(root, self.get_actual_image_url(url), url, title=text) + self.handle_image_inlining(root, found_url) continue if get_tweet_id(url) is not None: if rendered_tweet_count >= self.TWITTER_MAX_TO_PREVIEW:
diff --git a/zerver/fixtures/markdown_test_cases.json b/zerver/fixtures/markdown_test_cases.json --- a/zerver/fixtures/markdown_test_cases.json +++ b/zerver/fixtures/markdown_test_cases.json @@ -258,6 +258,13 @@ "backend_only_rendering": true, "text_content": "Google logo today: https:\/\/www.google.com\/images\/srpr\/logo4w.png\nKinda boring\n" }, + { + "name": "blockquote_inline_image", + "input": ">Google logo today:\n>https://www.google.com/images/srpr/logo4w.png\n>Kinda boring", + "expected_output": "<blockquote>\n<p>Google logo today:<br>\n<a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\">https://www.google.com/images/srpr/logo4w.png</a><br>\nKinda boring</p>\n<div class=\"message_inline_image\"><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\"><img src=\"https://www.google.com/images/srpr/logo4w.png\"></a></div></blockquote>", + "backend_only_rendering": true, + "text_content": "\nGoogle logo today:\nhttps:\/\/www.google.com\/images\/srpr\/logo4w.png\nKinda boring\n" + }, { "name": "two_inline_images", "input": "Google logo today: https://www.google.com/images/srpr/logo4w.png\nKinda boringGoogle logo today: https://www.google.com/images/srpr/logo4w.png\nKinda boring", @@ -265,12 +272,26 @@ "backend_only_rendering": true, "text_content": "Google logo today: https:\/\/www.google.com\/images\/srpr\/logo4w.png\nKinda boringGoogle logo today: https:\/\/www.google.com\/images\/srpr\/logo4w.png\nKinda boring\n" }, + { + "name": "bulleted_list_inlining", + "input": "* Google?\n* Google. https://www.google.com/images/srpr/logo4w.png\n* Google!", + "expected_output": "<ul>\n<li>Google?</li>\n<li>Google. <a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\">https://www.google.com/images/srpr/logo4w.png</a><div class=\"message_inline_image\"><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\"><img src=\"https://www.google.com/images/srpr/logo4w.png\"></a></div></li>\n<li>Google!</li>\n</ul>", + "backend_only_rendering": true, + "text_content": "\nGoogle?\nGoogle. 
https://www.google.com/images/srpr/logo4w.png\nGoogle!\n" + }, { "name": "only_inline_image", "input": "https://www.google.com/images/srpr/logo4w.png", "expected_output": "<div class=\"message_inline_image\"><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\"><img src=\"https://www.google.com/images/srpr/logo4w.png\"></a></div>", "backend_only_rendering": true }, + { + "name": "only_named_inline_image", + "input": "[Google Link](https://www.google.com/images/srpr/logo4w.png)", + "expected_output": "<p><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\">Google Link</a></p>\n<div class=\"message_inline_image\"><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"Google Link\"><img src=\"https://www.google.com/images/srpr/logo4w.png\"></a></div>", + "backend_only_rendering": true, + "text_content": "Google Link\n" + }, { "name": "only_non_image_link", "input": "https://github.com", diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -320,8 +320,16 @@ def test_inline_image_preview_order(self) -> None: converted = render_markdown(msg, content) self.assertEqual(converted, expected) + content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\n\n>http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\n\n* http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg\n* https://www.google.com/images/srpr/logo4w.png' + expected = '<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img src="https://external-content.zulipcdn.net/1081f3eb3d307ff5b578c1f5ce9d4cef8f8953c4/687474703a2f2f696d6167696e672e6e696b6f6e2e636f6d2f6c696e6575702f64736c722f64662f696d672f73616d706c652f696d675f30312e6a7067"></a></div><blockquote>\n<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg"><img src="https://external-content.zulipcdn.net/8a2da7577389c522fab18ba2e6d6947b85458074/687474703a2f2f696d6167696e672e6e696b6f6e2e636f6d2f6c696e6575702f64736c722f64662f696d672f73616d706c652f696d675f30322e6a7067"></a></div></blockquote>\n<ul>\n<li><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img src="https://external-content.zulipcdn.net/9c389273b239846aa6e07e109216773934e52828/687474703a2f2f696d6167696e672e6e696b6f6e2e636f6d2f6c696e6575702f64736c722f64662f696d672f73616d706c652f696d675f30332e6a7067"></a></div></li>\n<li><div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png" target="_blank" title="https://www.google.com/images/srpr/logo4w.png"><img src="https://www.google.com/images/srpr/logo4w.png"></a></div></li>\n</ul>' + + sender_user_profile = self.example_user('othello') + msg = Message(sender=sender_user_profile, sending_client=get_client("test")) + converted = render_markdown(msg, content) + self.assertEqual(converted, expected) + content = 'Test 1\n[21136101110_1dde1c1a7e_o.jpg](/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg) \n\nNext 
Image\n[IMG_20161116_023910.jpg](/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg) \n\nAnother Screenshot\n[Screenshot-from-2016-06-01-16-22-42.png](/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png)' - expected = '<p>Test 1<br>\n<a href="/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg" target="_blank" title="21136101110_1dde1c1a7e_o.jpg">21136101110_1dde1c1a7e_o.jpg</a> </p>\n<p>Next Image<br>\n<a href="/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg" target="_blank" title="IMG_20161116_023910.jpg">IMG_20161116_023910.jpg</a> </p>\n<p>Another Screenshot<br>\n<a href="/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png" target="_blank" title="Screenshot-from-2016-06-01-16-22-42.png">Screenshot-from-2016-06-01-16-22-42.png</a></p>\n<div class="message_inline_image"><a href="/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg" target="_blank" title="21136101110_1dde1c1a7e_o.jpg"><img src="/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg"></a></div><div class="message_inline_image"><a href="/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg" target="_blank" title="IMG_20161116_023910.jpg"><img src="/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg"></a></div><div class="message_inline_image"><a href="/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png" target="_blank" title="Screenshot-from-2016-06-01-16-22-42.png"><img src="/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png"></a></div>' + expected = '<p>Test 1<br>\n<a href="/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg" target="_blank" title="21136101110_1dde1c1a7e_o.jpg">21136101110_1dde1c1a7e_o.jpg</a> </p>\n<div class="message_inline_image"><a href="/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg" target="_blank" title="21136101110_1dde1c1a7e_o.jpg"><img src="/user_uploads/1/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg"></a></div><p>Next Image<br>\n<a href="/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg" target="_blank" title="IMG_20161116_023910.jpg">IMG_20161116_023910.jpg</a> </p>\n<div class="message_inline_image"><a href="/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg" target="_blank" title="IMG_20161116_023910.jpg"><img src="/user_uploads/1/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg"></a></div><p>Another Screenshot<br>\n<a href="/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png" target="_blank" title="Screenshot-from-2016-06-01-16-22-42.png">Screenshot-from-2016-06-01-16-22-42.png</a></p>\n<div class="message_inline_image"><a href="/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png" target="_blank" title="Screenshot-from-2016-06-01-16-22-42.png"><img src="/user_uploads/1/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png"></a></div>' msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content)
Improve logic for placing inline image previews within messages As a follow-up to #7324, I think we want the specification for links to images to work like this: * If the link is on a line by itself, show the preview in place of the link where it was (extending the #7529 logic, which does that, but only for messages that are just a link on a line by themselves). * If the link is in the middle of a paragraph (or similar), show the inline image preview just after that paragraph/bullet/whatever, rather than at the end of the message. * Do something reasonable for the corner cases like "line-on-itself-in-quote-block".
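A toy sketch of the second rule above — place the preview right after the paragraph containing the link, not at the end of the message — using plain `xml.etree` rather than Zulip's actual `InlineInterestingLinkProcessor`, and ignoring the corner cases:

```python
# Toy illustration only: insert each preview div immediately after the
# paragraph that contains its link.
import xml.etree.ElementTree as ET

def insert_previews(root):
    offset = 0  # account for divs already inserted while iterating
    for index, child in enumerate(list(root)):
        if child.tag != "p":
            continue
        for link in child.findall("a"):
            preview = ET.Element("div", {"class": "message_inline_image"})
            ET.SubElement(preview, "a", {"href": link.get("href")})
            root.insert(index + offset + 1, preview)
            offset += 1

root = ET.fromstring(
    '<body><p>logo: <a href="http://x/logo.png">http://x/logo.png</a></p>'
    '<p>kinda boring</p></body>'
)
insert_previews(root)
print(ET.tostring(root).decode())
```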
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! <!-- areaLabelNotification --> @zulipbot claim @zulipbot abandon @zulipbot claim
2017-12-27T20:37:52
zulip/zulip
7,969
zulip__zulip-7969
[ "7968", "7968" ]
ec2015ee8c88693467537f77746870bc6a58db6e
diff --git a/zilencer/management/commands/populate_db.py b/zilencer/management/commands/populate_db.py --- a/zilencer/management/commands/populate_db.py +++ b/zilencer/management/commands/populate_db.py @@ -62,6 +62,12 @@ def add_arguments(self, parser: CommandParser) -> None: default=0, help='The number of extra bots to create') + parser.add_argument('--extra-streams', + dest='extra_streams', + type=int, + default=0, + help='The number of extra streams to create') + parser.add_argument('--huddles', dest='num_huddles', type=int, @@ -338,6 +344,20 @@ def handle(self, **options: Any) -> None: "errors": {"description": "For errors", "invite_only": False}, "sales": {"description": "For sales discussion", "invite_only": False} } # type: Dict[Text, Dict[Text, Any]] + + for i in range(options['extra_streams']): + # Since a stream with name "Extra Stream 3" could show up + # after "Extra Stream 29", pad the numbers with 0s. + maximum_digits = len(str(options['extra_streams'] - 1)) + number_str = str(i).zfill(maximum_digits) + + extra_stream_name = 'Extra Stream ' + number_str + + zulip_stream_dict[extra_stream_name] = { + "description": "Auto-generated extra stream.", + "invite_only": False, + } + bulk_create_streams(zulip_realm, zulip_stream_dict) # Now that we've created the notifications stream, configure it properly. zulip_realm.notifications_stream = get_stream("announce", zulip_realm)
manual testing: Make it easy to subscribe to many streams. We should add an option like `extra-streams` to `populate_db.py` that makes it easy to create lots of streams for manual testing. We should subscribe Cordelia/Hamlet/etc. to most of the streams.
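A tiny sketch of the naming scheme used in the patch above — the counter is zero-padded so that, for example, "Extra Stream 3" does not sort after "Extra Stream 29" when stream names are compared as plain strings (illustrative only):

```python
# Mirror the zero-padding logic from populate_db so generated stream
# names sort correctly as strings.
def extra_stream_names(count):
    width = len(str(count - 1))
    return ["Extra Stream " + str(i).zfill(width) for i in range(count)]

print(extra_stream_names(12)[:3])   # ['Extra Stream 00', 'Extra Stream 01', 'Extra Stream 02']
print(sorted(extra_stream_names(12))[-1])  # 'Extra Stream 11'
```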
Hello @zulip/server-testing members, this issue was labeled with the **area: testing-infrastructure** label, so you may want to check it out! <!-- areaLabelNotification -->
2018-01-02T18:07:27
zulip/zulip
8,036
zulip__zulip-8036
[ "7743" ]
ec1297c1e899bca4a452689b70a63b424e3d4bb1
diff --git a/zerver/webhooks/gitlab/view.py b/zerver/webhooks/gitlab/view.py --- a/zerver/webhooks/gitlab/view.py +++ b/zerver/webhooks/gitlab/view.py @@ -264,13 +264,15 @@ def get_object_url(payload: Dict[str, Any]) -> Text: @has_request_variables def api_gitlab_webhook(request: HttpRequest, user_profile: UserProfile, stream: Text=REQ(default='gitlab'), + topic: Text=REQ(default=None), payload: Dict[str, Any]=REQ(argument_type='body'), branches: Optional[Text]=REQ(default=None)) -> HttpResponse: event = get_event(request, payload, branches) if event is not None: body = get_body_based_on_event(event)(payload) - subject = get_subject_based_on_event(event, payload) - check_send_stream_message(user_profile, request.client, stream, subject, body) + if topic is None: + topic = get_subject_based_on_event(event, payload) + check_send_stream_message(user_profile, request.client, stream, topic, body) return json_success() def get_body_based_on_event(event: str) -> Any:
diff --git a/zerver/webhooks/gitlab/tests.py b/zerver/webhooks/gitlab/tests.py --- a/zerver/webhooks/gitlab/tests.py +++ b/zerver/webhooks/gitlab/tests.py @@ -11,6 +11,12 @@ class GitlabHookTests(WebhookTestCase): URL_TEMPLATE = "/api/v1/external/gitlab?&api_key={api_key}&stream={stream}" FIXTURE_DIR_NAME = 'gitlab' + def test_push_event_specified_topic(self) -> None: + self.url = self.build_webhook_url("topic=Specific%20topic") + expected_topic = u"Specific topic" + expected_message = u"Tomasz Kolek [pushed](https://gitlab.com/tomaszkolek0/my-awesome-project/compare/5fcdd5551fc3085df79bece2c32b1400802ac407...eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9) 2 commits to branch tomek.\n\n* b ([66abd2d](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/66abd2da28809ffa128ed0447965cf11d7f863a7))\n* c ([eb6ae1e](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9))" + self.send_and_test_stream_message('push', expected_topic, expected_message, HTTP_X_GITLAB_EVENT="Push Hook") + def test_push_event_message(self) -> None: expected_subject = u"my-awesome-project / tomek" expected_message = u"Tomasz Kolek [pushed](https://gitlab.com/tomaszkolek0/my-awesome-project/compare/5fcdd5551fc3085df79bece2c32b1400802ac407...eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9) 2 commits to branch tomek.\n\n* b ([66abd2d](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/66abd2da28809ffa128ed0447965cf11d7f863a7))\n* c ([eb6ae1e](https://gitlab.com/tomaszkolek0/my-awesome-project/commit/eb6ae1e591e0819dc5bf187c6bfe18ec065a80e9))"
integrations: Add topic query parameter for gitlab integration. https://chat.zulip.org/#narrow/stream/integrations/topic/gitlab.20configuration.20request
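With the patch above, the topic override is just an additional query parameter on the webhook URL. A hypothetical example of constructing such a URL (the host, stream, and API key below are invented):

```python
# Hypothetical URL only -- host, api_key and stream values are made up.
from urllib.parse import urlencode

params = {"api_key": "abcdefgh12345678", "stream": "gitlab", "topic": "Specific topic"}
print("https://zulip.example.com/api/v1/external/gitlab?" + urlencode(params))
# https://zulip.example.com/api/v1/external/gitlab?api_key=abcdefgh12345678&stream=gitlab&topic=Specific+topic
```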
Hello @zulip/server-integrations members, this issue was labeled with the **area: integrations** label, so you may want to check it out! <!-- areaLabelNotification --> @rishig can take up this issue? It's best to work on only one issue at a time, but other than that go ahead! @rishig can you help me with what i am supposed to do? I would like to work on this bug. Could anyone explain about this bug? @zulipbot claim Welcome to Zulip, @fossbalaji! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](https://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Hello @fossbalaji, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot claim Welcome to Zulip, @hrishi3108! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](https://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! @zulipbot abandon @zulipbot claim
2018-01-09T11:44:23
zulip/zulip
8,085
zulip__zulip-8085
[ "7411" ]
953478a18908d77e9901e24bebe9fbe99c968774
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -71,7 +71,8 @@ custom_profile_fields_for_realm, get_huddle_user_ids, \ CustomProfileFieldValue, validate_attachment_request, get_system_bot, \ get_display_recipient_by_id, query_for_ids, get_huddle_recipient, \ - UserGroup, UserGroupMembership, get_default_stream_groups + UserGroup, UserGroupMembership, get_default_stream_groups, \ + get_service_dicts_for_bots, get_bot_services from zerver.lib.alert_words import alert_words_in_realm from zerver.lib.avatar import avatar_url @@ -415,6 +416,7 @@ def stream_name(stream: Optional[Stream]) -> Optional[Text]: default_events_register_stream=default_events_register_stream_name, default_all_public_streams=user_profile.default_all_public_streams, avatar_url=avatar_url(user_profile), + services = get_service_dicts_for_bots(user_profile.id), ) # Set the owner key only when the bot has an owner. @@ -4380,6 +4382,24 @@ def do_update_user_group_description(user_group: UserGroup, description: Text) - user_group.save(update_fields=['description']) do_send_user_group_update_event(user_group, dict(description=description)) +def do_update_outgoing_webhook_service(bot_profile, service_interface, service_payload_url): + # type: (UserProfile, int, Text) -> None + # TODO: First service is chosen because currently one bot can only have one service. + # Update this once multiple services are supported. + service = get_bot_services(bot_profile.id)[0] + service.base_url = service_payload_url + service.interface = service_interface + service.save() + send_event(dict(type='realm_bot', + op='update', + bot=dict(email=bot_profile.email, + user_id=bot_profile.id, + services = [dict(base_url=service.base_url, + interface=service.interface)], + ), + ), + bot_owner_user_ids(bot_profile)) + def do_send_user_group_members_update_event(event_name: Text, user_group: UserGroup, user_ids: List[int]) -> None: diff --git a/zerver/models.py b/zerver/models.py --- a/zerver/models.py +++ b/zerver/models.py @@ -1460,6 +1460,7 @@ def get_owned_bot_dicts(user_profile: UserProfile, 'default_all_public_streams': botdict['default_all_public_streams'], 'owner': botdict['bot_owner__email'], 'avatar_url': avatar_url_from_dict(botdict), + 'services': get_service_dicts_for_bots(botdict['id']), } for botdict in result] @@ -1909,6 +1910,14 @@ def get_realm_outgoing_webhook_services_name(realm: Realm) -> List[Any]: def get_bot_services(user_profile_id: str) -> List[Service]: return list(Service.objects.filter(user_profile__id=user_profile_id)) +def get_service_dicts_for_bots(user_profile_id: str) -> List[Dict[str, Any]]: + services = get_bot_services(user_profile_id) + service_dicts = [{'base_url': service.base_url, + 'interface': service.interface, + } + for service in services] + return service_dicts + def get_service_profile(user_profile_id: str, service_name: str) -> Service: return Service.objects.get(user_profile__id=user_profile_id, name=service_name) diff --git a/zerver/views/users.py b/zerver/views/users.py --- a/zerver/views/users.py +++ b/zerver/views/users.py @@ -15,7 +15,7 @@ do_change_is_admin, do_change_default_all_public_streams, \ do_change_default_events_register_stream, do_change_default_sending_stream, \ do_create_user, do_deactivate_user, do_reactivate_user, do_regenerate_api_key, \ - check_change_full_name + check_change_full_name, notify_created_bot, do_update_outgoing_webhook_service from zerver.lib.avatar import avatar_url, get_gravatar_url, get_avatar_field 
from zerver.lib.bot_config import set_bot_config from zerver.lib.exceptions import JsonableError @@ -156,6 +156,8 @@ def patch_bot_backend( request: HttpRequest, user_profile: UserProfile, email: Text, full_name: Optional[Text]=REQ(default=None), bot_owner: Optional[Text]=REQ(default=None), + service_payload_url: Optional[Text]=REQ(validator=check_url, default=None), + service_interface: Optional[int]=REQ(validator=check_int, default=1), default_sending_stream: Optional[Text]=REQ(default=None), default_events_register_stream: Optional[Text]=REQ(default=None), default_all_public_streams: Optional[bool]=REQ(default=None, validator=check_bool) @@ -190,6 +192,10 @@ def patch_bot_backend( if default_all_public_streams is not None: do_change_default_all_public_streams(bot, default_all_public_streams) + if service_payload_url is not None: + check_valid_interface_type(service_interface) + do_update_outgoing_webhook_service(bot, service_interface, service_payload_url) + if len(request.FILES) == 0: pass elif len(request.FILES) == 1: @@ -203,6 +209,8 @@ def patch_bot_backend( json_result = dict( full_name=bot.full_name, avatar_url=avatar_url(bot), + service_interface = service_interface, + service_payload_url = service_payload_url, default_sending_stream=get_stream_name(bot.default_sending_stream), default_events_register_stream=get_stream_name(bot.default_events_register_stream), default_all_public_streams=bot.default_all_public_streams, @@ -320,6 +328,8 @@ def add_bot_backend( for key, value in config_data.items(): set_bot_config(bot_profile, key, value) + notify_created_bot(bot_profile) + json_result = dict( api_key=bot_profile.api_key, avatar_url=avatar_url(bot_profile),
diff --git a/frontend_tests/node_tests/bot_data.js b/frontend_tests/node_tests/bot_data.js --- a/frontend_tests/node_tests/bot_data.js +++ b/frontend_tests/node_tests/bot_data.js @@ -14,7 +14,9 @@ set_global('$', function (f) { set_global('document', null); var page_params = { - realm_bots: [{email: '[email protected]', full_name: 'Bot 0'}], + realm_bots: [{email: '[email protected]', user_id: 42, full_name: 'Bot 0'}, + {email: '[email protected]', user_id: 314, full_name: "Outgoing webhook", + services: [{base_url: "http://foo.com", interface: 1}]}], is_admin: false, }; set_global('page_params', page_params); @@ -29,34 +31,46 @@ global.people.initialize_current_user(42); bot_data.initialize(); // Our startup logic should have added Bot 0 from page_params. -assert.equal(bot_data.get('[email protected]').full_name, 'Bot 0'); +assert.equal(bot_data.get(42).full_name, 'Bot 0'); +assert.equal(bot_data.get(314).full_name, 'Outgoing webhook'); (function () { var test_bot = { email: '[email protected]', + user_id: 43, avatar_url: '', full_name: 'Bot 1', + services: [{base_url: "http://bar.com", interface: 1}], extra: 'Not in data', }; (function test_add() { bot_data.add(test_bot); - var bot = bot_data.get('[email protected]'); + var bot = bot_data.get(43); + var services = bot_data.get_services(43); assert.equal('Bot 1', bot.full_name); + assert.equal('http://bar.com', services[0].base_url); + assert.equal(1, services[0].interface); assert.equal(undefined, bot.extra); }()); (function test_update() { var bot; + var services; bot_data.add(test_bot); - bot = bot_data.get('[email protected]'); + bot = bot_data.get(43); assert.equal('Bot 1', bot.full_name); - bot_data.update('[email protected]', {full_name: 'New Bot 1'}); - bot = bot_data.get('[email protected]'); + bot_data.update(43, {full_name: 'New Bot 1', + services: [{interface: 2, + base_url: 'http://baz.com'}]}); + bot = bot_data.get(43); + services = bot_data.get_services(43); assert.equal('New Bot 1', bot.full_name); + assert.equal(2, services[0].interface); + assert.equal('http://baz.com', services[0].base_url); }()); (function test_remove() { @@ -64,11 +78,11 @@ assert.equal(bot_data.get('[email protected]').full_name, 'Bot 0'); bot_data.add(_.extend({}, test_bot, {is_active: true})); - bot = bot_data.get('[email protected]'); + bot = bot_data.get(43); assert.equal('Bot 1', bot.full_name); assert(bot.is_active); - bot_data.deactivate('[email protected]'); - bot = bot_data.get('[email protected]'); + bot_data.deactivate(43); + bot = bot_data.get(43); assert.equal(bot.is_active, false); }()); @@ -77,12 +91,12 @@ assert.equal(bot_data.get('[email protected]').full_name, 'Bot 0'); bot_data.add(_.extend({owner: '[email protected]'}, test_bot)); - bot = bot_data.get('[email protected]'); + bot = bot_data.get(43); assert(bot.can_admin); bot_data.add(_.extend({owner: '[email protected]'}, test_bot)); - bot = bot_data.get('[email protected]'); + bot = bot_data.get(43); assert.equal(false, bot.can_admin); }()); @@ -92,7 +106,7 @@ assert.equal(bot_data.get('[email protected]').full_name, 'Bot 0'); bot_data.add(test_bot); - bot = bot_data.get('[email protected]'); + bot = bot_data.get(43); assert(bot.can_admin); page_params.is_admin = false; @@ -101,9 +115,9 @@ assert.equal(bot_data.get('[email protected]').full_name, 'Bot 0'); (function test_get_editable() { var can_admin; - bot_data.add(_.extend({}, test_bot, {owner: '[email protected]', is_active: true})); - bot_data.add(_.extend({}, test_bot, {email: '[email protected]', owner: '[email 
protected]', is_active: true})); - bot_data.add(_.extend({}, test_bot, {email: '[email protected]', owner: '[email protected]', is_active: true})); + bot_data.add(_.extend({}, test_bot, {user_id: 44, owner: '[email protected]', is_active: true})); + bot_data.add(_.extend({}, test_bot, {user_id: 45, email: '[email protected]', owner: '[email protected]', is_active: true})); + bot_data.add(_.extend({}, test_bot, {user_id: 46, email: '[email protected]', owner: '[email protected]', is_active: true})); can_admin = _.pluck(bot_data.get_editable(), 'email'); assert.deepEqual(['[email protected]', '[email protected]'], can_admin); diff --git a/frontend_tests/node_tests/dispatch.js b/frontend_tests/node_tests/dispatch.js --- a/frontend_tests/node_tests/dispatch.js +++ b/frontend_tests/node_tests/dispatch.js @@ -214,6 +214,7 @@ var event_fixtures = { op: 'remove', bot: { email: '[email protected]', + user_id: '42', full_name: 'The Bot', }, }, @@ -585,8 +586,8 @@ with_overrides(function (override) { override('bot_data.deactivate', bot_stub.f); override('settings_users.update_user_data', admin_stub.f); dispatch(event); - var args = bot_stub.get_args('email'); - assert_same(args.email, event.bot.email); + var args = bot_stub.get_args('user_id'); + assert_same(args.user_id, event.bot.user_id); admin_stub.get_args('update_user_id', 'update_bot_data'); }); @@ -600,8 +601,8 @@ with_overrides(function (override) { dispatch(event); - var args = bot_stub.get_args('email', 'bot'); - assert_same(args.email, event.bot.email); + var args = bot_stub.get_args('user_id', 'bot'); + assert_same(args.user_id, event.bot.user_id); assert_same(args.bot, event.bot); args = admin_stub.get_args('update_user_id', 'update_bot_data'); diff --git a/zerver/tests/test_bots.py b/zerver/tests/test_bots.py --- a/zerver/tests/test_bots.py +++ b/zerver/tests/test_bots.py @@ -141,6 +141,7 @@ def test_add_bot(self) -> None: default_sending_stream=None, default_events_register_stream=None, default_all_public_streams=False, + services=[], owner=self.example_email('hamlet')) ), event['event'] @@ -302,6 +303,7 @@ def test_add_bot_with_default_sending_stream_private_allowed(self) -> None: default_sending_stream='Denmark', default_events_register_stream=None, default_all_public_streams=False, + services=[], owner=self.example_email('hamlet')) ), event['event'] @@ -369,6 +371,7 @@ def test_add_bot_with_default_events_register_stream_private_allowed(self) -> No default_sending_stream=None, default_events_register_stream='Denmark', default_all_public_streams=False, + services=[], owner=self.example_email('hamlet')) ), event['event'] @@ -932,6 +935,47 @@ def test_patch_bogus_bot(self) -> None: self.assert_json_error(result, 'No such user') self.assert_num_bots_equal(1) + def test_patch_outgoing_webhook_bot(self) -> None: + self.login(self.example_email('hamlet')) + bot_info = { + 'full_name': u'The Bot of Hamlet', + 'short_name': u'hambot', + 'bot_type': UserProfile.OUTGOING_WEBHOOK_BOT, + 'payload_url': ujson.dumps("http://foo.bar.com"), + 'service_interface': Service.GENERIC, + } + result = self.client_post("/json/bots", bot_info) + self.assert_json_success(result) + bot_info = { + 'service_payload_url': ujson.dumps("http://foo.bar2.com"), + 'service_interface': Service.SLACK, + } + result = self.client_patch("/json/bots/[email protected]", bot_info) + self.assert_json_success(result) + + service_interface = ujson.loads(result.content)['service_interface'] + self.assertEqual(service_interface, Service.SLACK) + + service_payload_url = 
ujson.loads(result.content)['service_payload_url'] + self.assertEqual(service_payload_url, "http://foo.bar2.com") + + def test_outgoing_webhook_invalid_interface(self): + # type: () -> None + self.login(self.example_email('hamlet')) + bot_info = { + 'full_name': 'Outgoing Webhook test bot', + 'short_name': 'outgoingservicebot', + 'bot_type': UserProfile.OUTGOING_WEBHOOK_BOT, + 'payload_url': ujson.dumps('http://127.0.0.1:5002/bots/followup'), + 'interface_type': -1, + } + result = self.client_post("/json/bots", bot_info) + self.assert_json_error(result, 'Invalid interface type') + + bot_info['interface_type'] = Service.GENERIC + result = self.client_post("/json/bots", bot_info) + self.assert_json_success(result) + def test_create_outgoing_webhook_bot(self, **extras: Any) -> None: self.login(self.example_email('hamlet')) bot_info = { diff --git a/zerver/tests/test_events.py b/zerver/tests/test_events.py --- a/zerver/tests/test_events.py +++ b/zerver/tests/test_events.py @@ -73,6 +73,7 @@ do_update_embedded_data, do_update_message, do_update_message_flags, + do_update_outgoing_webhook_service, do_update_pointer, do_update_user_presence, log_event, @@ -106,10 +107,11 @@ ) from zerver.lib.validator import ( check_bool, check_dict, check_dict_only, check_float, check_int, check_list, check_string, - equals, check_none_or, Validator + equals, check_none_or, Validator, check_url ) from zerver.views.events_register import _default_all_public_streams, _default_narrow +from zerver.views.users import add_service from zerver.tornado.event_queue import ( allocate_client_descriptor, @@ -1534,6 +1536,10 @@ def test_create_bot(self) -> None: ('default_all_public_streams', check_bool), ('avatar_url', check_string), ('owner', check_string), + ('services', check_list(check_dict_only([ # type: ignore # check_url doesn't completely fit the default validator spec, but is de facto working here. + ('base_url', check_url), + ('interface', check_int), + ]))), ])), ]) action = lambda: self.create_bot('[email protected]') @@ -1632,6 +1638,30 @@ def test_change_bot_owner(self) -> None: error = change_bot_owner_checker('events[0]', events[0]) self.assert_on_error(error) + def test_do_update_outgoing_webhook_service(self): + # type: () -> None + update_outgoing_webhook_service_checker = self.check_events_dict([ + ('type', equals('realm_bot')), + ('op', equals('update')), + ('bot', check_dict_only([ + ('email', check_string), + ('user_id', check_int), + ('services', check_list(check_dict_only([ # type: ignore # check_url doesn't completely fit the default validator spec, but is de facto working here. 
+ ('base_url', check_url), + ('interface', check_int), + ]))), + ])), + ]) + self.user_profile = self.example_user('iago') + bot = do_create_user('[email protected]', '123', get_realm('zulip'), 'Test Bot', 'test', + bot_type=UserProfile.OUTGOING_WEBHOOK_BOT, bot_owner=self.user_profile) + add_service(user_profile=bot, name="test", base_url="http://hostname.domain1.com", + interface=1, token="abced") + action = lambda: do_update_outgoing_webhook_service(bot, 2, 'http://hostname.domain2.com') + events = self.do_test(action) + error = update_outgoing_webhook_service_checker('events[0]', events[0]) + self.assert_on_error(error) + def test_do_deactivate_user(self) -> None: bot_deactivate_checker = self.check_events_dict([ ('type', equals('realm_bot')), @@ -1664,6 +1694,10 @@ def test_do_reactivate_user(self) -> None: ('default_all_public_streams', check_bool), ('avatar_url', check_string), ('owner', check_none_or(check_string)), + ('services', check_list(check_dict_only([ # type: ignore # check_url doesn't completely fit the default validator spec, but is de facto working here. + ('base_url', check_url), + ('interface', check_int), + ]))), ])), ]) bot = self.create_bot('[email protected]') diff --git a/zerver/tests/test_home.py b/zerver/tests/test_home.py --- a/zerver/tests/test_home.py +++ b/zerver/tests/test_home.py @@ -183,7 +183,7 @@ def test_home(self) -> None: with patch('zerver.lib.cache.cache_set') as cache_mock: result = self._get_home_page(stream='Denmark') - self.assert_length(queries, 41) + self.assert_length(queries, 42) self.assert_length(cache_mock.call_args_list, 7) html = result.content.decode('utf-8') @@ -211,6 +211,7 @@ def test_home(self) -> None: 'full_name', 'is_active', 'owner', + 'services', 'user_id', ]
Allow editing of outgoing webhook URLs The "Edit bot" form doesn't provide a way to change the outgoing webhook URL for a bot. This means that changing a URL requires deleting a bot and re-creating. But Zulip's UI doesn't allow deletion of bots... which means the only workaround is "suspending" a bot and creating a new one with a different id. That means the bot's name can't match its id, which leads to confusion (e.g. sending a message to `@xkcd` which refers to `some-other-named-bot@domain`). ![image](https://user-images.githubusercontent.com/313089/32889343-8cecbbce-ca98-11e7-84e2-19066f4a8498.png)
Hello @zulip/server-bots, @zulip/server-settings members, this issue was labeled with the **area: bots**, **area: settings UI** labels, so you may want to check it out! Yeah, this is a good issue. We have #5665 for this, but it would need to be rebased and refactored a bit.
2018-01-16T19:46:12
zulip/zulip
8,086
zulip__zulip-8086
[ "7550" ]
8530ed0b5e8ee8f62bceb5202ecf3e69c7bcf210
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -253,13 +253,31 @@ def add_embed(root: Element, link: Text, extracted_data: Dict[Text, Any]) -> Non a.set("target", "_blank") a.set("title", title) a.text = title - description = extracted_data.get('description') if description: description_elm = markdown.util.etree.SubElement(data_container, "div") description_elm.set("class", "message_embed_description") description_elm.text = description +def add_vimeo_preview(root: Element, link: Text, extracted_data: Dict[Text, Any], vm_id: Text) -> None: + container = markdown.util.etree.SubElement(root, "div") + container.set("class", "vimeo-video message_inline_image") + + img_link = extracted_data.get('image') + if img_link: + parsed_img_link = urllib.parse.urlparse(img_link) + # Append domain where relative img_link url is given + if not parsed_img_link.netloc: + parsed_url = urllib.parse.urlparse(link) + domain = '{url.scheme}://{url.netloc}/'.format(url=parsed_url) + img_link = urllib.parse.urljoin(domain, img_link) + anchor = markdown.util.etree.SubElement(container, "a") + anchor.set("href", link) + anchor.set("target", "_blank") + anchor.set("data-id", vm_id) + anchor.set("title", link) + img = markdown.util.etree.SubElement(anchor, "img") + img.set("src", img_link) @cache_with_key(lambda tweet_id: tweet_id, cache_name="database", with_statsd_key="tweet_data") def fetch_tweet_data(tweet_id: Text) -> Optional[Dict[Text, Any]]: @@ -327,13 +345,11 @@ def fetch_open_graph_image(url: Text) -> Optional[Dict[str, Any]]: # a closing tag if it has not been closed yet. last_closed = True head = [] - # TODO: What if response content is huge? Should we get headers first? try: content = requests.get(url, timeout=1).text except Exception: return None - # Extract the head and meta tags # All meta tags are self closing, have no children or are closed # automatically. @@ -529,6 +545,27 @@ def youtube_image(self, url: Text) -> Optional[Text]: return "https://i.ytimg.com/vi/%s/default.jpg" % (yt_id,) return None + def vimeo_id(self, url: Text) -> Optional[Text]: + if not image_preview_enabled_for_realm(): + return None + #(http|https)?:\/\/(www\.)?vimeo.com\/(?:channels\/(?:\w+\/)?|groups\/([^\/]*)\/videos\/|)(\d+)(?:|\/\?) + # If it matches, match.group('id') is the video id. + + vimeo_re = r'^((http|https)?:\/\/(www\.)?vimeo.com\/' + \ + r'(?:channels\/(?:\w+\/)?|groups\/' + \ + r'([^\/]*)\/videos\/|)(\d+)(?:|\/\?))$' + match = re.match(vimeo_re, url) + if match is None: + return None + return match.group(5) + + def vimeo_image(self, url: Text) -> Optional[Text]: + vm_id = self.vimeo_id(url) + + if vm_id is not None: + return "http://i.vimeocdn.com/video/%s.jpg" % (vm_id,) + return None + def twitter_text(self, text: Text, urls: List[Dict[Text, Text]], user_mentions: List[Dict[Text, Any]], @@ -841,8 +878,14 @@ def run(self, root: Element) -> None: except NotFoundInCache: current_message.links_for_preview.add(url) continue + vimeo = self.vimeo_image(url) if extracted_data: - add_embed(root, url, extracted_data) + if vimeo is not None: + vm_id = self.vimeo_id(url) + add_vimeo_preview(root, url, extracted_data, vm_id) + continue + else: + add_embed(root, url, extracted_data) class Avatar(markdown.inlinepatterns.Pattern):
diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -290,6 +290,17 @@ def test_inline_youtube(self) -> None: self.assertEqual(converted, '<p><a href="http://www.youtube.com/watch?v=hx1mjT73xYE" target="_blank" title="http://www.youtube.com/watch?v=hx1mjT73xYE">http://www.youtube.com/watch?v=hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="http://www.youtube.com/watch?v=hx1mjT73xYE" target="_blank" title="http://www.youtube.com/watch?v=hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') + def test_inline_vimeo(self) -> None: + msg = 'Check out the debate: https://vimeo.com/246979354' + converted = bugdown_convert(msg) + + self.assertEqual(converted, '<p>Check out the debate: <a href="https://vimeo.com/246979354" target="_blank" title="https://vimeo.com/246979354">https://vimeo.com/246979354</a></p>') + + msg = 'https://vimeo.com/246979354' + converted = bugdown_convert(msg) + + self.assertEqual(converted, '<p><a href="https://vimeo.com/246979354" target="_blank" title="https://vimeo.com/246979354">https://vimeo.com/246979354</a></p>') + @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_preview(self) -> None: with_preview = '<p>Test: <a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg" target="_blank" title="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></p>\n<div class="message_inline_image"><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg" target="_blank" title="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg"><img src="https://external-content.zulipcdn.net/389b5d7148a0cbc7475ed564e1b03ceb476bdacb/687474703a2f2f63646e2e77616c6c70617065727361666172692e636f6d2f31332f362f313665566a782e6a706567"></a></div>'
bugdown: Add preview for vimeo videos. If you search for youtube in `zerver/lib/bugdown/__init__.py`, you can see the code that does a special preview for youtube videos. We should have similar functionality for vimeo. You can test the youtube functionality by sending a youtube link to "#test here" on chat.zulip.org. Note also the youtube links are about to change; if you take this issue before #7549 is resolved, follow the new format (with the name) rather than the old format (with the visible link). Make sure to add appropriate tests for this. You can search for (and mostly copy) the youtube tests by doing `git grep -i youtube zerver/tests/test_bugdown.py`.
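The `vimeo_id` regex added in the patch above can be sanity-checked on its own, outside the bugdown pipeline (the URLs below are just examples):

```python
# Exercise the vimeo_id pattern from the patch on a few sample URLs.
import re

VIMEO_RE = (r'^((http|https)?:\/\/(www\.)?vimeo.com\/'
            r'(?:channels\/(?:\w+\/)?|groups\/'
            r'([^\/]*)\/videos\/|)(\d+)(?:|\/\?))$')

for url in ["https://vimeo.com/246979354",
            "https://vimeo.com/channels/staffpicks/246979354",
            "https://example.com/246979354"]:
    match = re.match(VIMEO_RE, url)
    print(url, "->", match.group(5) if match else None)
```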
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! <!-- areaLabelNotification --> @zulipbot claim Welcome to Zulip, @nax3n! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Hello @nax3n, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot claim
2018-01-16T20:04:26
zulip/zulip
8,134
zulip__zulip-8134
[ "6493" ]
953478a18908d77e9901e24bebe9fbe99c968774
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -3558,6 +3558,7 @@ def do_update_message(user_profile: UserProfile, message: Message, topic_name: O event["content"] = content event["rendered_content"] = rendered_content event['prev_rendered_content_version'] = message.rendered_content_version + event['is_me_message'] = Message.is_status_message(content, rendered_content) prev_content = edit_history_event['prev_content'] if Message.content_has_attachment(prev_content) or Message.content_has_attachment(message.content):
diff --git a/frontend_tests/node_tests/compose.js b/frontend_tests/node_tests/compose.js --- a/frontend_tests/node_tests/compose.js +++ b/frontend_tests/node_tests/compose.js @@ -1281,6 +1281,14 @@ function test_with_mock_socket(test_params) { }; } + function setup_mock_markdown_is_status_message(msg_content, msg_rendered, return_val) { + markdown.is_status_message = function (content, rendered) { + assert.equal(content, msg_content); + assert.equal(rendered, msg_rendered); + return return_val; + }; + } + function test_post_success(success_callback) { var resp = { rendered: 'Server: foobarfoobar', @@ -1338,6 +1346,7 @@ function test_with_mock_socket(test_params) { $("#compose-textarea").val('```foobarfoobar```'); setup_visibilities(); setup_mock_markdown_contains_backend_only_syntax('```foobarfoobar```', true); + setup_mock_markdown_is_status_message('```foobarfoobar```', 'Server: foobarfoobar', false); loading.make_indicator = function (spinner) { assert.equal(spinner, $("#markdown_preview_spinner")); make_indicator_called = true; @@ -1353,6 +1362,7 @@ function test_with_mock_socket(test_params) { $("#compose-textarea").val('foobarfoobar'); setup_visibilities(); setup_mock_markdown_contains_backend_only_syntax('foobarfoobar', false); + setup_mock_markdown_is_status_message('foobarfoobar', 'Server: foobarfoobar', false); mock_channel_post('foobarfoobar'); markdown.apply_markdown = function (msg) { assert.equal(msg.raw_content, 'foobarfoobar'); diff --git a/frontend_tests/node_tests/markdown.js b/frontend_tests/node_tests/markdown.js --- a/frontend_tests/node_tests/markdown.js +++ b/frontend_tests/node_tests/markdown.js @@ -351,6 +351,12 @@ var bugdown_data = JSON.parse(fs.readFileSync(path.join(__dirname, '../../zerver assert.equal(message.is_me_message, true); assert(!message.unread); + input = "/me is testing\nthis"; + message = {subject: "No links here", raw_content: input}; + markdown.apply_markdown(message); + + assert.equal(message.is_me_message, false); + input = "testing this @**all** @**Cordelia Lear**"; message = {subject: "No links here", raw_content: input}; markdown.apply_markdown(message); diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -738,6 +738,14 @@ def test_is_status_message(self) -> None: ) self.assertTrue(Message.is_status_message(content, rendered_content)) + content = '/me writes a second line\nline' + rendered_content = render_markdown(msg, content) + self.assertEqual( + rendered_content, + '<p>/me writes a second line<br>\nline</p>' + ) + self.assertFalse(Message.is_status_message(content, rendered_content)) + def test_alert_words(self) -> None: user_profile = self.example_user('othello') do_set_alert_words(user_profile, ["ALERTWORD", "scaryword"]) diff --git a/zerver/tests/test_events.py b/zerver/tests/test_events.py --- a/zerver/tests/test_events.py +++ b/zerver/tests/test_events.py @@ -679,6 +679,7 @@ def get_checker(check_gravatar: Validator) -> Validator: ('subject', check_string), ('subject_links', check_list(None)), ('user_id', check_int), + ('is_me_message', check_bool), ]) message = Message.objects.order_by('-id')[0]
Message displays weird behaviour when using /me, newline, and quote mark after editing When I send a message and edit it with text that contains `/me`, a `newline`, and a quote mark, the message displays weird behaviour. See https://chat.zulip.org/#narrow/stream/issues/topic/end.20of.20message.20feed for further information. ![meedit](https://user-images.githubusercontent.com/20320125/30358231-77b9ad3c-986d-11e7-99f0-cbeecb659557.gif) How to reproduce: 1. Create a message; the content is up to you. For example: ``` up to you ``` 2. Send the message. 3. Edit the message, changing the content to something that contains `/me`, a `newline`, and a quote mark. For example: ![screenshot_20170913_101940](https://user-images.githubusercontent.com/20320125/30358173-15c97c4c-986d-11e7-8ba5-61ed40f1d5ad.png) 4. Save the message. The message then displays weird behaviour, like this: ![screenshot_20170913_132552](https://user-images.githubusercontent.com/20320125/30362633-232c0188-9887-11e7-88de-ffcea7928727.png) The behaviour returns to normal when the user reloads the page. Expected behaviour: The message shouldn't be displayed as a `/me` message; it should be displayed as a normal message.
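As the patch above suggests, the visible bug comes down to deciding whether an edited message is still a status (`/me`) message: the rule is that the raw content starts with `/me ` and renders to a single one-line paragraph. A rough standalone sketch of that check, not the actual `Message.is_status_message` implementation:

```
def looks_like_status_message(content: str, rendered_content: str) -> bool:
    """Rough approximation of the check referenced above: a /me message must
    start with '/me ' and must render to a single one-line paragraph."""
    if not content.startswith('/me '):
        return False
    # Multi-line content renders with <br> inside the paragraph, so it is
    # not treated as a status message.
    return (rendered_content.startswith('<p>') and
            rendered_content.endswith('</p>') and
            '<br>' not in rendered_content)

if __name__ == '__main__':
    assert looks_like_status_message('/me waves', '<p>/me waves</p>')
    assert not looks_like_status_message('/me writes a second line\nline',
                                         '<p>/me writes a second line<br>\nline</p>')
```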
@zulipbot claim Welcome to Zulip, @msdinit! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/index.html#code-contribution-guide), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Hello @msdinit, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 10 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot claim Hello @wjkg, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 10 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 4 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! <!-- inactiveWarning -->
2018-01-21T19:33:31
zulip/zulip
8,397
zulip__zulip-8397
[ "6802" ]
cab3c8c32c2529f6027a2e4f0ad045a8b2f2a27b
diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py --- a/zerver/lib/integrations.py +++ b/zerver/lib/integrations.py @@ -294,6 +294,7 @@ def __init__(self, name: str, *args: Any, **kwargs: Any) -> None: ), WebhookIntegration('dropbox', ['productivity'], display_name='Dropbox'), WebhookIntegration('freshdesk', ['customer-support']), + WebhookIntegration('front', ['customer-support'], display_name='Front'), GithubIntegration( 'github', ['version-control'], diff --git a/zerver/webhooks/front/__init__.py b/zerver/webhooks/front/__init__.py new file mode 100644 diff --git a/zerver/webhooks/front/view.py b/zerver/webhooks/front/view.py new file mode 100644 --- /dev/null +++ b/zerver/webhooks/front/view.py @@ -0,0 +1,188 @@ +from typing import Any, Dict, Optional, Text, Tuple + +from django.http import HttpRequest, HttpResponse +from django.utils.translation import ugettext as _ + +from zerver.decorator import api_key_only_webhook_view +from zerver.lib.actions import check_send_stream_message +from zerver.lib.request import REQ, has_request_variables +from zerver.lib.response import json_error, json_success +from zerver.models import UserProfile + +def get_message_data(payload: Dict[Text, Any]) -> Optional[Tuple[Text, Text, Text, Text]]: + try: + link = "https://app.frontapp.com/open/" + payload['target']['data']['id'] + outbox = payload['conversation']['recipient']['handle'] + inbox = payload['source']['data'][0]['address'] + subject = payload['conversation']['subject'] + except KeyError: + return None + + return link, outbox, inbox, subject + +def get_source_name(payload: Dict[Text, Any]) -> Optional[Text]: + try: + first_name = payload['source']['data']['first_name'] + last_name = payload['source']['data']['last_name'] + except KeyError: + return None + + return "%s %s" % (first_name, last_name) + +def get_target_name(payload: Dict[Text, Any]) -> Optional[Text]: + try: + first_name = payload['target']['data']['first_name'] + last_name = payload['target']['data']['last_name'] + except KeyError: + return None + + return "%s %s" % (first_name, last_name) + +def get_comment(payload: Dict[Text, Any]) -> Optional[Text]: + try: + comment = payload['target']['data']['body'] + except KeyError: + return None + + return comment + +def get_tag(payload: Dict[Text, Any]) -> Optional[Text]: + try: + tag = payload['target']['data']['name'] + except KeyError: + return None + + return tag + +@api_key_only_webhook_view('Front') +@has_request_variables +def api_front_webhook(request: HttpRequest, user_profile: UserProfile, + payload: Dict[Text, Any]=REQ(argument_type='body'), + stream: Text=REQ(default='front'), + topic: Optional[Text]=REQ(default='cnv_id')) -> HttpResponse: + try: + event_type = payload['type'] + conversation_id = payload['conversation']['id'] + except KeyError: + return json_error(_("Missing required data")) + + # Each topic corresponds to a separate conversation in Front. 
+ topic = conversation_id + + # Inbound message + if event_type == 'inbound': + message_data = get_message_data(payload) + if not message_data: + return json_error(_("Missing required data")) + + link, outbox, inbox, subject = message_data + body = "[Inbound message]({link}) from **{outbox}** to **{inbox}**.\n" \ + "```quote\n*Subject*: {subject}\n```" \ + .format(link=link, outbox=outbox, inbox=inbox, subject=subject) + + # Outbound message + elif event_type == 'outbound': + message_data = get_message_data(payload) + if not message_data: + return json_error(_("Missing required data")) + + link, outbox, inbox, subject = message_data + body = "[Outbound message]({link}) from **{inbox}** to **{outbox}**.\n" \ + "```quote\n*Subject*: {subject}\n```" \ + .format(link=link, inbox=inbox, outbox=outbox, subject=subject) + + # Outbound reply + elif event_type == 'out_reply': + message_data = get_message_data(payload) + if not message_data: + return json_error(_("Missing required data")) + + link, outbox, inbox, subject = message_data + body = "[Outbound reply]({link}) from **{inbox}** to **{outbox}**." \ + .format(link=link, inbox=inbox, outbox=outbox) + + # Comment or mention + elif event_type == 'comment' or event_type == 'mention': + name, comment = get_source_name(payload), get_comment(payload) + if not (name and comment): + return json_error(_("Missing required data")) + + body = "**{name}** left a comment:\n```quote\n{comment}\n```" \ + .format(name=name, comment=comment) + + # Conversation assigned + elif event_type == 'assign': + source_name = get_source_name(payload) + target_name = get_target_name(payload) + + if not (source_name and target_name): + return json_error(_("Missing required data")) + + if source_name == target_name: + body = "**{source_name}** assigned themselves." \ + .format(source_name=source_name) + else: + body = "**{source_name}** assigned **{target_name}**." 
\ + .format(source_name=source_name, target_name=target_name) + + # Conversation unassigned + elif event_type == 'unassign': + name = get_source_name(payload) + if not name: + return json_error(_("Missing required data")) + + body = "Unassined by **{name}**.".format(name=name) + + # Conversation archived + elif event_type == 'archive': + name = get_source_name(payload) + if not name: + return json_error(_("Missing required data")) + + body = "Archived by **{name}**.".format(name=name) + + # Conversation reopened + elif event_type == 'reopen': + name = get_source_name(payload) + if not name: + return json_error(_("Missing required data")) + + body = "Reopened by **{name}**.".format(name=name) + + # Conversation deleted + elif event_type == 'trash': + name = get_source_name(payload) + if not name: + return json_error(_("Missing required data")) + + body = "Deleted by **{name}**.".format(name=name) + + # Conversation restored + elif event_type == 'restore': + name = get_source_name(payload) + if not name: + return json_error(_("Missing required data")) + + body = "Restored by **{name}**.".format(name=name) + + # Conversation tagged + elif event_type == 'tag': + name, tag = get_source_name(payload), get_tag(payload) + if not (name and tag): + return json_error(_("Missing required data")) + + body = "**{name}** added tag **{tag}**.".format(name=name, tag=tag) + + # Conversation untagged + elif event_type == 'untag': + name, tag = get_source_name(payload), get_tag(payload) + if not (name and tag): + return json_error(_("Missing required data")) + + body = "**{name}** removed tag **{tag}**.".format(name=name, tag=tag) + else: + return json_error(_("Unknown webhook request")) + + check_send_stream_message(user_profile, request.client, stream, topic, body) + + return json_success()
diff --git a/zerver/webhooks/front/tests.py b/zerver/webhooks/front/tests.py new file mode 100644 --- /dev/null +++ b/zerver/webhooks/front/tests.py @@ -0,0 +1,282 @@ +from typing import Text +import ujson + +from zerver.lib.test_classes import WebhookTestCase + +class FrontHookTests(WebhookTestCase): + STREAM_NAME = 'front' + URL_TEMPLATE = "/api/v1/external/front?&api_key={api_key}" + FIXTURE_DIR_NAME = 'front' + + def _test_no_message_data(self, fixture_name: Text) -> None: + payload = self.get_body(fixture_name) + payload_json = ujson.loads(payload) + del payload_json['conversation']['subject'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + def _test_no_source_name(self, fixture_name: Text) -> None: + payload = self.get_body(fixture_name) + payload_json = ujson.loads(payload) + del payload_json['source']['data']['first_name'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + def _test_no_target_name(self, fixture_name: Text) -> None: + payload = self.get_body(fixture_name) + payload_json = ujson.loads(payload) + del payload_json['target']['data']['first_name'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + def _test_no_comment(self, fixture_name: Text) -> None: + payload = self.get_body(fixture_name) + payload_json = ujson.loads(payload) + del payload_json['target']['data']['body'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + def _test_no_tag(self, fixture_name: Text) -> None: + payload = self.get_body(fixture_name) + payload_json = ujson.loads(payload) + del payload_json['target']['data']['name'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + def test_no_event_type(self) -> None: + payload = self.get_body('1_conversation_assigned_outbound') + payload_json = ujson.loads(payload) + del payload_json['type'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + def test_no_conversation_id(self) -> None: + payload = self.get_body('1_conversation_assigned_outbound') + payload_json = ujson.loads(payload) + del payload_json['conversation']['id'] + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Missing required data") + + # Scenario 1: Conversation starts from an outbound message. + + # Conversation automatically assigned to a teammate who started it. + def test_conversation_assigned_outbound(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "**Leela Turanga** assigned themselves." 
+ + self.send_and_test_stream_message('1_conversation_assigned_outbound', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_outbound_message(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "[Outbound message](https://app.frontapp.com/open/msg_1176ie2) " \ + "from **[email protected]** " \ + "to **[email protected]**.\n" \ + "```quote\n*Subject*: Your next delivery is on Epsilon 96Z\n```" + + self.send_and_test_stream_message('2_outbound_message', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_outbound_message_error(self) -> None: + self._test_no_message_data('2_outbound_message') + + def test_conversation_archived(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "Archived by **Leela Turanga**." + + self.send_and_test_stream_message('3_conversation_archived', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_archived_error(self) -> None: + self._test_no_source_name('3_conversation_archived') + + def test_conversation_reopened(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "Reopened by **Leela Turanga**." + + self.send_and_test_stream_message('4_conversation_reopened', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_reopened_error(self) -> None: + self._test_no_source_name('4_conversation_reopened') + + def test_conversation_deleted(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "Deleted by **Leela Turanga**." + + self.send_and_test_stream_message('5_conversation_deleted', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_deleted_error(self) -> None: + self._test_no_source_name('5_conversation_deleted') + + def test_conversation_restored(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "Restored by **Leela Turanga**." + + self.send_and_test_stream_message('6_conversation_restored', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_restored_error(self) -> None: + self._test_no_source_name('6_conversation_restored') + + def test_conversation_unassigned(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "Unassined by **Leela Turanga**." + + self.send_and_test_stream_message('7_conversation_unassigned', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_unassigned_error(self) -> None: + self._test_no_source_name('7_conversation_unassigned') + + def test_mention_all(self) -> None: + expected_subject = 'cnv_keo696' + expected_message = "**Leela Turanga** left a comment:\n" \ + "```quote\n@all Could someone else take this?\n```" + + self.send_and_test_stream_message('8_mention_all', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + # Scenario 2: Conversation starts from an inbound message. 
+ + def test_inbound_message(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "[Inbound message](https://app.frontapp.com/open/msg_1176r8y) " \ + "from **[email protected]** " \ + "to **[email protected]**.\n" \ + "```quote\n*Subject*: Being a robot is great, but...\n```" + + self.send_and_test_stream_message('9_inbound_message', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_inbound_message_error(self) -> None: + self._test_no_message_data('9_inbound_message') + + def test_conversation_tagged(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "**Leela Turanga** added tag **Urgent**." + + self.send_and_test_stream_message('10_conversation_tagged', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_tagged_error(self) -> None: + self._test_no_tag('10_conversation_tagged') + + # Conversation automatically assigned to a teammate who replied to it. + def test_conversation_assigned_reply(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "**Leela Turanga** assigned themselves." + + self.send_and_test_stream_message('11_conversation_assigned_reply', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_outbound_reply(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "[Outbound reply](https://app.frontapp.com/open/msg_1176ryy) " \ + "from **[email protected]** " \ + "to **[email protected]**." + + self.send_and_test_stream_message('12_outbound_reply', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_outbound_reply_error(self) -> None: + self._test_no_message_data('12_outbound_reply') + + def test_conversation_untagged(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "**Leela Turanga** removed tag **Urgent**." + + self.send_and_test_stream_message('13_conversation_untagged', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_untagged_error(self) -> None: + self._test_no_tag('13_conversation_untagged') + + def test_mention(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "**Leela Turanga** left a comment:\n" \ + "```quote\n@bender Could you take it from here?\n```" + + self.send_and_test_stream_message('14_mention', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_comment(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "**Bender Rodriguez** left a comment:\n" \ + "```quote\nSure.\n```" + + self.send_and_test_stream_message('15_comment', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_comment_error(self) -> None: + self._test_no_comment('15_comment') + + # Conversation manually assigned to another teammate. + def test_conversation_assigned(self) -> None: + expected_subject = 'cnv_keocka' + expected_message = "**Leela Turanga** assigned **Bender Rodriguez**." 
+ + self.send_and_test_stream_message('16_conversation_assigned', + expected_subject, + expected_message, + content_type="application/x-www-form-urlencoded") + + def test_conversation_assigned_error(self) -> None: + self._test_no_target_name('16_conversation_assigned') + + def test_unknown_webhook_request(self) -> None: + payload = self.get_body('16_conversation_assigned') + payload_json = ujson.loads(payload) + payload_json['type'] = 'qwerty' + result = self.client_post(self.url, ujson.dumps(payload_json), + content_type="application/x-www-form-urlencoded") + + self.assert_json_error(result, "Unknown webhook request") + + def get_body(self, fixture_name: Text) -> Text: + return self.fixture_data('front', fixture_name, file_type="json")
integrations: Add a webhook integration for Front. This would be a great project for a new contributor. We have a guide for writing webhook integrations at http://zulip.readthedocs.io/en/latest/integration-guide.html#webhook-integrations. Front is at frontapp.com.
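For anyone picking this up, the overall shape of an incoming-webhook view is small; the sketch below is a stripped-down skeleton using the same decorators and helpers that appear in the merged patch above. The Front-specific formatting of each event type is omitted and the topic name is made up, so treat this as a starting point rather than the real handler:

```
from typing import Any, Dict

from django.http import HttpRequest, HttpResponse

from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_stream_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile

@api_key_only_webhook_view('Front')
@has_request_variables
def api_front_webhook(request: HttpRequest, user_profile: UserProfile,
                      payload: Dict[str, Any]=REQ(argument_type='body'),
                      stream: str=REQ(default='front')) -> HttpResponse:
    # Turn the Front event payload into a short Zulip message; the merged
    # handler formats each event type separately instead of this one-liner.
    body = "Received a Front event of type **{}**.".format(payload.get('type', 'unknown'))
    check_send_stream_message(user_profile, request.client, stream, 'front events', body)
    return json_success()
```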
@zulipbot claim Hello @dgarvit, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 7 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 3 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! @zulipbot abandon Hey @rishig i want to do this ...but it's my first time so i need a little guidance.I know python and read about webhook integration from the docs..I'm hoping this can be my first contribution to zulip as you wrote that it's good for beginners.. @zulipbot claim Welcome to Zulip, @Rishabh570! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](http://zulip.readthedocs.io/en/latest/index.html#code-contribution-guide), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Great, and welcome! https://github.com/zulip/zulip-gci/blob/master/tasks/webhook-integrations.md might be helpful as well. I'm not getting exactly what info i need to show up in my view.py... I'd recommend starting with the hello world example in the link I posted. Let's also move this to chat.zulip.org #"development help" stream, where more people will see it. I've been through hello world example...but the fixture there was small and concise so it was pretty understandable that which info is to be extracted from it...but in case of front the json file is very big as compared to helloworld and i'm being confused... Thnx BTW... @Rishabh570 Are you still working on it? @sagar-kalra Yes...sorry for delay, got some error to be fixed... Hello @Rishabh570, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 7 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 3 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! Yes...working on it..about two or three days i need at max cuz my exams are coming. Hello @Rishabh570, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 7 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. 
Otherwise, I'll automatically remove you from this issue in 3 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! yes...working on it...opened up a WIP PR #7245 ..it will done within this week most probably !!! Hello @Rishabh570, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 10 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 4 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! <!-- inactiveWarning --> Working...Actually delay is happening due to my exams in 1st and 2nd week of Dec..But will try to manage time for it..!!! Hello @Rishabh570, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 10 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 4 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! <!-- inactiveWarning --> Little busy...but will get it done soon...! I'm done working on it...PR is #7245 Hello @Rishabh570, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot claim Hello @Rishabh570, it looks like you've currently claimed 1 issue in this repository. We encourage new contributors to focus their efforts on at most 1 issue at a time, so please complete your work on your other claimed issues before trying to claim this issue again. We look forward to your valuable contributions! @zulipbot claim Welcome to Zulip, @RavicharanN! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](https://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! Hey @rishig , looks like the webhook integration guidelines have been removed. Can you provide me with an alternative source? 
@RavicharanN You might want to read this to proceed with the webhook integration: https://zulip.readthedocs.io/en/1.7.1/webhook-walkthrough.html and the PR for this issue is #7245
2018-02-15T16:16:15
zulip/zulip
8,684
zulip__zulip-8684
[ "8669" ]
7ce139a798555a5488ea27cdafb0f30f6ec4b543
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -8,4 +8,4 @@ # Typically, adding a dependency only requires a minor version bump, and # removing a dependency requires a major version bump. -PROVISION_VERSION = '15.9' +PROVISION_VERSION = '15.10'
diff --git a/frontend_tests/node_tests/people.js b/frontend_tests/node_tests/people.js --- a/frontend_tests/node_tests/people.js +++ b/frontend_tests/node_tests/people.js @@ -2,7 +2,7 @@ zrequire('util'); zrequire('people'); set_global('blueslip', { - error: function () { return undefined; }, + error: function () { return; }, }); set_global('page_params', {}); set_global('md5', function (s) { @@ -555,7 +555,7 @@ initialize(); assert.equal(email, '[email protected]'); // Test undefined slug - people.emails_strings_to_user_ids_string = function () { return undefined; }; + people.emails_strings_to_user_ids_string = function () { return; }; assert.equal(people.emails_to_slug(), undefined); }()); diff --git a/frontend_tests/node_tests/people_errors.js b/frontend_tests/node_tests/people_errors.js --- a/frontend_tests/node_tests/people_errors.js +++ b/frontend_tests/node_tests/people_errors.js @@ -107,7 +107,7 @@ people.initialize_current_user(me.user_id); assert(reply_to.indexOf('?') > -1); people.pm_with_user_ids = function () { return [42]; }; - people.get_person_from_user_id = function () { return undefined; }; + people.get_person_from_user_id = function () { return; }; global.blueslip.error = function (msg) { assert.equal(msg, 'Unknown people in message'); }; diff --git a/frontend_tests/node_tests/search_suggestion.js b/frontend_tests/node_tests/search_suggestion.js --- a/frontend_tests/node_tests/search_suggestion.js +++ b/frontend_tests/node_tests/search_suggestion.js @@ -53,7 +53,7 @@ topic_data.reset(); }; global.narrow_state.stream = function () { - return undefined; + return; }; var suggestions = search.get_suggestions(query); @@ -73,7 +73,7 @@ topic_data.reset(); }; global.narrow_state.stream = function () { - return undefined; + return; }; var ted = @@ -244,7 +244,7 @@ topic_data.reset(); }; global.narrow_state.stream = function () { - return undefined; + return; }; set_global('activity', { @@ -430,7 +430,7 @@ init(); }; global.narrow_state.stream = function () { - return undefined; + return; }; var suggestions = search.get_suggestions(query); @@ -466,7 +466,7 @@ init(); }; global.narrow_state.stream = function () { - return undefined; + return; }; var query = ''; diff --git a/frontend_tests/node_tests/topic_generator.js b/frontend_tests/node_tests/topic_generator.js --- a/frontend_tests/node_tests/topic_generator.js +++ b/frontend_tests/node_tests/topic_generator.js @@ -174,7 +174,7 @@ function is_odd(i) { return i % 2 === 1; } assert.equal(gen.next(), undefined); var undef = function () { - return undefined; + return; }; global.blueslip.error = function (msg) { @@ -315,7 +315,7 @@ function is_odd(i) { return i % 2 === 1; } unread.num_unread_for_person = function (user_ids_string) { if (user_ids_string === 'unk') { - return undefined; + return; } if (user_ids_string === 'read') {
lint rules: Prevent `return undefined;` We should sweep the code to replace `return undefined;` with `return;`, and then make a lint rule for it, either via eslint (if they support that) or by making a custom rule.
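A custom rule for this is essentially a regex scan over the JS sources. The sketch below is an illustrative standalone checker, not Zulip's actual lint tooling or an eslint rule; wiring the pattern into the real linter (or expressing it as an eslint rule) is the remaining work:

```
import re
import sys
from pathlib import Path

# Illustrative standalone check; the real rule would live in the project's
# lint tooling rather than a separate script.
PATTERN = re.compile(r'\breturn\s+undefined\s*;')

def check_file(path: Path) -> int:
    errors = 0
    for lineno, line in enumerate(path.read_text().splitlines(), start=1):
        if PATTERN.search(line):
            print('%s:%d: use `return;` instead of `return undefined;`' % (path, lineno))
            errors += 1
    return errors

if __name__ == '__main__':
    failed = sum(check_file(Path(name)) for name in sys.argv[1:])
    sys.exit(1 if failed else 0)
```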
Hello @zulip/server-testing members, this issue was labeled with the **area: testing-infrastructure** label, so you may want to check it out! <!-- areaLabelNotification --> @zulipbot claim
2018-03-13T12:10:51
zulip/zulip
8,713
zulip__zulip-8713
[ "6990" ]
45f0df6d314b3a1d23d54535e475a9918dee47e3
diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py --- a/zerver/lib/integrations.py +++ b/zerver/lib/integrations.py @@ -284,6 +284,7 @@ def __init__(self, name: str, *args: Any, **kwargs: Any) -> None: WebhookIntegration('circleci', ['continuous-integration'], display_name='CircleCI'), WebhookIntegration('codeship', ['continuous-integration', 'deployment']), WebhookIntegration('crashlytics', ['monitoring']), + WebhookIntegration('dialogflow', ['customer-support'], display_name='Dialogflow'), WebhookIntegration('delighted', ['customer-support', 'marketing'], display_name='Delighted'), WebhookIntegration( 'deskdotcom', diff --git a/zerver/webhooks/dialogflow/__init__.py b/zerver/webhooks/dialogflow/__init__.py new file mode 100644 diff --git a/zerver/webhooks/dialogflow/view.py b/zerver/webhooks/dialogflow/view.py new file mode 100644 --- /dev/null +++ b/zerver/webhooks/dialogflow/view.py @@ -0,0 +1,33 @@ +# Webhooks for external integrations. +from typing import Text, Any, Dict +from django.http import HttpRequest, HttpResponse +from zerver.decorator import api_key_only_webhook_view +from zerver.lib.actions import check_send_private_message +from zerver.lib.request import REQ, has_request_variables +from zerver.lib.response import json_success +from zerver.models import UserProfile, get_user_profile_by_email + +@api_key_only_webhook_view("dialogflow") +@has_request_variables +def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile, + payload: Dict[str, Any]=REQ(argument_type='body'), + email: str=REQ(default='foo')) -> HttpResponse: + status = payload["status"]["code"] + + if status == 200: + result = payload["result"]["fulfillment"]["speech"] + if not result: + alternate_result = payload["alternateResult"]["fulfillment"]["speech"] + if not alternate_result: + body = u"DialogFlow couldn't process your query." + else: + body = alternate_result + else: + body = result + else: + error_status = payload["status"]["errorDetails"] + body = u"{} - {}".format(status, error_status) + + profile = get_user_profile_by_email(email) + check_send_private_message(user_profile, request.client, profile, body) + return json_success()
diff --git a/zerver/webhooks/dialogflow/tests.py b/zerver/webhooks/dialogflow/tests.py new file mode 100644 --- /dev/null +++ b/zerver/webhooks/dialogflow/tests.py @@ -0,0 +1,66 @@ +# -*- coding: utf -*- +from typing import Text +from zerver.lib.test_classes import WebhookTestCase + +class DialogflowHookTests(WebhookTestCase): + URL_TEMPLATE = u"/api/v1/external/dialogflow?api_key={api_key}&[email protected]" + + def test_dialogflow_default(self) -> None: + self.url = self.build_webhook_url( + email="[email protected]", + username="aaron", + user_ip="127.0.0.1" + ) + expected_message = u"Today the weather in Delhi: Sunny, And the tempreture is 65F" + self.send_and_test_private_message('default', + expected_message, + content_type="application/json") + + def test_dialogflow_weather_app(self) -> None: + self.url = self.build_webhook_url( + email="[email protected]", + username="aaron", + user_ip="127.0.0.1" + ) + expected_message = u"The weather sure looks great !" + self.send_and_test_private_message('weather_app', + expected_message, + content_type="application/json") + + def test_dialogflow_alternate_result(self) -> None: + self.url = self.build_webhook_url( + email="[email protected]", + username="aaron", + user_ip="127.0.0.1" + ) + expected_message = u"Weather in New Delhi is nice!" + self.send_and_test_private_message('alternate_result', + expected_message, + content_type="application/json") + + def test_dialogflow_error_status(self) -> None: + self.url = self.build_webhook_url( + email="[email protected]", + username="aaron", + user_ip="127.0.0.1" + ) + expected_message = u"403 - Access Denied" + self.send_and_test_private_message('error_status', + expected_message, + content_type="application/json") + + def test_dialogflow_exception(self) -> None: + self.url = self.build_webhook_url( + email="[email protected]", + username="aaron", + user_ip="127.0.0.1" + ) + expected_message = u"DialogFlow couldn't process your query." + self.send_and_test_private_message('exception', + expected_message, + content_type="application/json") + + def get_body(self, fixture_name: Text) -> Text: + return self.fixture_data("dialogflow", + fixture_name, + file_type="json")
Support Dialogflow (API.AI) Zulip is not listed as a supported integration in Dialogflow (previously API.AI). Figure out what is needed to add Zulip to this list, and implement it. https://dialogflow.com/docs/integrations/
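The merged handler above keys off the v1-style webhook payload (`status.code` and `result.fulfillment.speech`, with `alternateResult` as a fallback). A small sketch of that same extraction logic, with a made-up sample payload, which can be handy when poking at fixtures locally:

```
from typing import Any, Dict

def extract_reply(payload: Dict[str, Any]) -> str:
    # Mirrors the precedence in the view above: fulfillment speech first,
    # then the alternate result, then a fallback error string.
    if payload["status"]["code"] == 200:
        speech = payload["result"]["fulfillment"]["speech"]
        if speech:
            return speech
        alternate = payload["alternateResult"]["fulfillment"]["speech"]
        return alternate or "DialogFlow couldn't process your query."
    return "{} - {}".format(payload["status"]["code"], payload["status"]["errorDetails"])

if __name__ == '__main__':
    sample = {
        "status": {"code": 200},
        "result": {"fulfillment": {"speech": "The weather sure looks great !"}},
    }
    print(extract_reply(sample))
```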
Hello @zulip/server-integrations members, this issue was labeled with the **area: integrations** label, so you may want to check it out! @hackerkid do you have a sense how much work adding this would be / how much that service is used? I'm trying to think about how to prioritize this. @timabbott I am not aware of how much work is needed for this. I will do some research and get back to this. @zulipbot claim
2018-03-15T19:01:35
zulip/zulip
8,805
zulip__zulip-8805
[ "8751" ]
d32d7a9b4d98a83d049088069087e1ef694196c5
diff --git a/zerver/management/commands/makemessages.py b/zerver/management/commands/makemessages.py --- a/zerver/management/commands/makemessages.py +++ b/zerver/management/commands/makemessages.py @@ -133,6 +133,7 @@ def my_templatize(src: Text, *args: Any, **kwargs: Any) -> Text: try: ignore_patterns = options.get('ignore_patterns', []) ignore_patterns.append('docs/*') + ignore_patterns.append('var/*') options['ignore_patterns'] = ignore_patterns super().handle(*args, **options) finally:
Errors when running `manage.py makemessages` I get these errors in my local development environment: ``` (zulip-py3-venv) tabbott@zaset:~/zulip$ ./manage.py makemessages UnicodeDecodeError: skipped file brainstorm_notes.txt in ./var/uploads/files/15/3d/47qkB-BgaArZ7wrTMTr-nsTK (reason: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte) UnicodeDecodeError: skipped file -.txt in ./var/uploads/files/15/9e/fqVojOZvoTZuGZ39r2_37NBn (reason: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte) UnicodeDecodeError: skipped file -.txt in ./var/uploads/files/2/fc/IfxNDeGaie57gWdOOok1Pyb5 (reason: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte) processing locale ca processing locale es ``` I'm not sure why `manage.py makemessages` is parsing these uploaded files at all. @umairwaheed can you try to track this down? We don't have a clear reproducer, but it seems like this sort of thing should be findable when reading code.
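The fix in the patch above simply adds `var/*` to the ignore list that Zulip's `makemessages` wrapper already passes through to Django, so uploaded files under `var/uploads` are never scanned for translatable strings. A stripped-down sketch of that pattern (assuming the standard Django `makemessages.Command` subclassing approach the patch uses):

```
from typing import Any

from django.core.management.commands import makemessages

class Command(makemessages.Command):
    def handle(self, *args: Any, **options: Any) -> None:
        # Equivalent in spirit to running
        #   ./manage.py makemessages --ignore='docs/*' --ignore='var/*'
        ignore_patterns = options.get('ignore_patterns', [])
        ignore_patterns.extend(['docs/*', 'var/*'])
        options['ignore_patterns'] = ignore_patterns
        super().handle(*args, **options)
```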
Hello @zulip/server-i18n members, this issue was labeled with the "area: i18n" label, so you may want to check it out! <!-- areaLabelNotification --> Sure. Will do on this weekend. @zulipbot claim
2018-03-24T05:52:14
zulip/zulip
8,981
zulip__zulip-8981
[ "8958" ]
59a9b69c253d256b92a2ed9c08029d667b1cc1cf
diff --git a/zerver/views/messages.py b/zerver/views/messages.py --- a/zerver/views/messages.py +++ b/zerver/views/messages.py @@ -557,6 +557,33 @@ def exclude_muting_conditions(user_profile: UserProfile, return conditions +def get_base_query_for_search(user_profile: UserProfile, + need_message: bool, + need_user_message: bool) -> Tuple[Query, ColumnElement]: + if need_message and need_user_message: + query = select([column("message_id"), column("flags")], + column("user_profile_id") == literal(user_profile.id), + join(table("zerver_usermessage"), table("zerver_message"), + literal_column("zerver_usermessage.message_id") == + literal_column("zerver_message.id"))) + inner_msg_id_col = column("message_id") + return (query, inner_msg_id_col) + + if need_user_message: + query = select([column("message_id"), column("flags")], + column("user_profile_id") == literal(user_profile.id), + table("zerver_usermessage")) + inner_msg_id_col = column("message_id") + return (query, inner_msg_id_col) + + else: + assert(need_message) + query = select([column("id").label("message_id")], + None, + table("zerver_message")) + inner_msg_id_col = literal_column("zerver_message.id") + return (query, inner_msg_id_col) + def add_narrow_conditions(user_profile: UserProfile, inner_msg_id_col: ColumnElement, query: Query, @@ -595,10 +622,32 @@ def add_narrow_conditions(user_profile: UserProfile, return (query, is_search) def find_first_unread_anchor(sa_conn: Any, - inner_msg_id_col: ColumnElement, user_profile: UserProfile, - narrow: List[Dict[str, Any]], - query: Query) -> int: + narrow: List[Dict[str, Any]]) -> int: + # We always need UserMessage in our query, because it has the unread + # flag for the user. + need_user_message = True + + # TODO: + # We err on the side of putting Message in our query, but there are + # probably situations that we don't need it. We may eventually try + # to make add_narrow_conditions() and similar functions help us make + # possible optimizations. + need_message = True + + query, inner_msg_id_col = get_base_query_for_search( + user_profile=user_profile, + need_message=need_message, + need_user_message=need_user_message, + ) + + query, is_search = add_narrow_conditions( + user_profile=user_profile, + inner_msg_id_col=inner_msg_id_col, + query=query, + narrow=narrow, + ) + condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0 # We exclude messages on muted topics when finding the first unread @@ -643,30 +692,29 @@ def get_messages_backend(request: HttpRequest, user_profile: UserProfile, apply_markdown: bool=REQ(validator=check_bool, default=True)) -> HttpResponse: include_history = ok_to_include_history(narrow, user_profile) - if include_history and not use_first_unread_anchor: + if include_history: # The initial query in this case doesn't use `zerver_usermessage`, # and isn't yet limited to messages the user is entitled to see! # # This is OK only because we've made sure this is a narrow that # will cause us to limit the query appropriately later. # See `ok_to_include_history` for details. - query = select([column("id").label("message_id")], None, table("zerver_message")) - inner_msg_id_col = literal_column("zerver_message.id") - elif narrow is None and not use_first_unread_anchor: - # This is limited to messages the user received, as recorded in `zerver_usermessage`. 
- query = select([column("message_id"), column("flags")], - column("user_profile_id") == literal(user_profile.id), - table("zerver_usermessage")) - inner_msg_id_col = column("message_id") + need_message = True + need_user_message = False + elif narrow is None: + # We need to limit to messages the user has received, but we don't actually + # need any fields from Message + need_message = False + need_user_message = True else: - # This is limited to messages the user received, as recorded in `zerver_usermessage`. - # TODO: Don't do this join if we're not doing a search - query = select([column("message_id"), column("flags")], - column("user_profile_id") == literal(user_profile.id), - join(table("zerver_usermessage"), table("zerver_message"), - literal_column("zerver_usermessage.message_id") == - literal_column("zerver_message.id"))) - inner_msg_id_col = column("message_id") + need_message = True + need_user_message = True + + query, inner_msg_id_col = get_base_query_for_search( + user_profile=user_profile, + need_message=need_message, + need_user_message=need_user_message, + ) query, is_search = add_narrow_conditions( user_profile=user_profile, @@ -690,10 +738,8 @@ def get_messages_backend(request: HttpRequest, user_profile: UserProfile, if use_first_unread_anchor: anchor = find_first_unread_anchor( sa_conn, - inner_msg_id_col, user_profile, narrow, - query ) anchored_to_left = (anchor == 0)
diff --git a/zerver/tests/test_narrow.py b/zerver/tests/test_narrow.py --- a/zerver/tests/test_narrow.py +++ b/zerver/tests/test_narrow.py @@ -1736,7 +1736,7 @@ def test_find_first_unread_anchor(self) -> None: self.make_stream('England') # Send a few messages that Hamlet won't have UserMessage rows for. - self.send_stream_message(cordelia.email, 'England') + unsub_message_id = self.send_stream_message(cordelia.email, 'England') self.send_personal_message(cordelia.email, othello.email) self.subscribe(hamlet, 'England') @@ -1747,37 +1747,50 @@ def test_find_first_unread_anchor(self) -> None: set_topic_mutes(hamlet, muted_topics) # send a muted message - self.send_stream_message(cordelia.email, 'England', topic_name='muted') + muted_message_id = self.send_stream_message(cordelia.email, 'England', topic_name='muted') # finally send Hamlet a "normal" message first_message_id = self.send_stream_message(cordelia.email, 'England') # send a few more messages - self.send_stream_message(cordelia.email, 'England') + extra_message_id = self.send_stream_message(cordelia.email, 'England') self.send_personal_message(cordelia.email, hamlet.email) sa_conn = get_sqlalchemy_connection() user_profile = hamlet - # TODO: Make it so that find_first_unread_anchor() does not require - # the incoming query to join to zerver_usermessage. - query = select([column("message_id"), column("flags")], - column("user_profile_id") == literal(user_profile.id), - join(table("zerver_usermessage"), table("zerver_message"), - literal_column("zerver_usermessage.message_id") == - literal_column("zerver_message.id"))) - inner_msg_id_col = column("message_id") - anchor = find_first_unread_anchor( sa_conn=sa_conn, - inner_msg_id_col=inner_msg_id_col, user_profile=user_profile, narrow=[], - query=query, ) self.assertEqual(anchor, first_message_id) + # With the same data setup, we now want to test that a reasonable + # search still gets the first message sent to Hamlet (before he + # subscribed) and other recent messages to the stream. + query_params = dict( + use_first_unread_anchor='true', + anchor=0, + num_before=10, + num_after=10, + narrow='[["stream", "England"]]' + ) + request = POSTRequestMock(query_params, user_profile) + + payload = get_messages_backend(request, user_profile) + result = ujson.loads(payload.content) + self.assertEqual(result['anchor'], first_message_id) + self.assertEqual(result['found_newest'], True) + self.assertEqual(result['found_oldest'], True) + + messages = result['messages'] + self.assertEqual( + {msg['id'] for msg in messages}, + {unsub_message_id, muted_message_id, first_message_id, extra_message_id} + ) + def test_use_first_unread_anchor_with_some_unread_messages(self) -> None: user_profile = self.example_user('hamlet') @@ -1958,7 +1971,7 @@ def test_use_first_unread_anchor_with_muted_topics(self) -> None: # the `message_id = LARGER_THAN_MAX_MESSAGE_ID` hack. queries = [q for q in all_queries if '/* get_messages */' in q['sql']] self.assertEqual(len(queries), 1) - self.assertIn('AND message_id = %d' % (LARGER_THAN_MAX_MESSAGE_ID,), + self.assertIn('AND zerver_message.id = %d' % (LARGER_THAN_MAX_MESSAGE_ID,), queries[0]['sql']) def test_exclude_muting_conditions(self) -> None:
get_messages: Fix interaction between include_history and use_first_unread_anchor @borisyankov helped me discover a subtle bug where `use_first_unread_anchor` and `include_history` don't interact correctly in a situation where the only messages on a topic were before the current user subscribed to that stream. Basically if you set things up like this: * Send a single message to a new topic "test" to a public stream Hamlet is not subscribed to ("Denmark" in this example). * Subscribe hamlet to that stream * Do the API query the mobile app does to try to view topic "test". (e.g. stream Denmark, topic "test", use_first_unread_anchor=True, anchor=0, num_before=25, num_after=25). Then the API query will return 0 messages (!). So effectively one sees messages in the webapp, but none appear on mobile. Here's how this happens: * Because `use_first_unread_anchor` is True, we end up going down the non-`include_history` code path in `get_messages_backend` and thus joining with UserMessage. The user has 0 rows for that topic in UserMessage (since they weren't subscribed when the message was sent), so everything from there on returns no messages. What we should actually be doing is in `get_messages_backend`: * The main query should just be on `Message` (i.e. we should get rid of the `and not use_first_unread_anchor` part of the `include_history` line) * In the `use_first_unread_anchor` code block, we should check `include_history`, and if True, we do the join against UserMessage in constructing `first_unread_query`, since we do need that join for getting the first unread message ID (just not for fetching the messages themselves, i.e. no changes to the main query). This should require a moderately sized refactor. * The rest of the code path correctly splits the logic based on whether `include_history` is True (i.e. does a UserMessage query to splice in the `flags` data as needed). @showell since you were just in this code, would you be up for tackling this in the next day or so? Ideally we'd have a fix in before the 1.8 release (aka Monday).
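Condensing the plan above: the choice of base query should depend only on whether we can include history and whether there is a narrow, independent of `use_first_unread_anchor`. A tiny sketch of that decision table (the helper name here is made up; the real refactor is `get_base_query_for_search` in the patch):

```
from typing import Tuple

def query_needs(include_history: bool, narrow_is_empty: bool) -> Tuple[bool, bool]:
    """Return (need_message, need_user_message) for the main query, per the
    fix above: history narrows query Message directly, and UserMessage is
    only consulted for flags and the first-unread computation."""
    if include_history:
        return True, False
    if narrow_is_empty:
        return False, True
    return True, True
```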
Hello @zulip/server-api members, this issue was labeled with the "area: api" label, so you may want to check it out! <!-- areaLabelNotification -->
2018-04-05T21:22:54
zulip/zulip
9,015
zulip__zulip-9015
[ "9003", "9003" ]
e92838a31fff05e3c1b57004d338b1ca33f30345
diff --git a/zerver/lib/slack_message_conversion.py b/zerver/lib/slack_message_conversion.py --- a/zerver/lib/slack_message_conversion.py +++ b/zerver/lib/slack_message_conversion.py @@ -73,8 +73,12 @@ def convert_to_zulip_markdown(text: str, users: List[ZerverFieldsT], text = convert_markdown_syntax(text, SLACK_ITALIC_REGEX, "*") # Map Slack's mention all: '<!everyone>' to '@**all** ' + # Map Slack's mention all: '<!channel>' to '@**all** ' + # Map Slack's mention all: '<!here>' to '@**all** ' # No regex for this as it can be present anywhere in the sentence text = text.replace('<!everyone>', '@**all**') + text = text.replace('<!channel>', '@**all**') + text = text.replace('<!here>', '@**all**') tokens = text.split(' ') for iterator in range(len(tokens)):
Import wildcard mentions from Slack into zulip When a user does a wildcard mention (i.e. `@channel`, `@here`, etc.), we should translate those to a zulip wildcard mention. I'd probably map them all to `@all` for now, but we should write the code in a way where changing the mapping is easy
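Since the request is that changing the mapping stay easy, a table-driven version is the natural shape; the merged patch above uses three literal `str.replace` calls, so the dict below is just an illustrative refactoring of the same mapping:

```
# Wildcard mentions Slack can emit, and the Zulip mention each one maps to.
# Editing this dict is all that is needed to change the mapping later.
SLACK_WILDCARD_TO_ZULIP = {
    '<!everyone>': '@**all**',
    '<!channel>': '@**all**',
    '<!here>': '@**all**',
}

def convert_wildcard_mentions(text: str) -> str:
    for slack_mention, zulip_mention in SLACK_WILDCARD_TO_ZULIP.items():
        text = text.replace(slack_mention, zulip_mention)
    return text

if __name__ == '__main__':
    print(convert_wildcard_mentions('<!here> deploy starting'))  # @**all** deploy starting
```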
Already implemented for `@everyone`: https://github.com/zulip/zulip/blob/65c4a43a82c3e8b377e0d586671590a0fdec3a41/zerver/lib/slack_message_conversion.py#L77. Can be readily extended to other wildcards. List of all the announcement types: `here`, `channel` and `everyone`. `all` has been deprecated. `group` is usually auto-completed to `channel` or list of users. Ref: 1. https://get.slack.help/hc/en-us/articles/202009646-Make-an-announcement
2018-04-07T10:04:24
zulip/zulip
9,035
zulip__zulip-9035
[ "8928" ]
c36a658fee985e5e6316aabb52d0325751a653ab
diff --git a/zerver/lib/slack_data_to_zulip_data.py b/zerver/lib/slack_data_to_zulip_data.py --- a/zerver/lib/slack_data_to_zulip_data.py +++ b/zerver/lib/slack_data_to_zulip_data.py @@ -71,8 +71,9 @@ def slack_workspace_to_realm(domain_name: str, realm_id: int, user_list: List[Ze zerver_useractivityinterval=[], zerver_realmfilter=[]) - zerver_userprofile, avatars, added_users = users_to_zerver_userprofile( - slack_data_dir, user_list, realm_id, int(NOW), domain_name) + zerver_userprofile, avatars, added_users, zerver_customprofilefield, \ + zerver_customprofilefield_value = users_to_zerver_userprofile(slack_data_dir, user_list, + realm_id, int(NOW), domain_name) channels_to_zerver_stream_fields = channels_to_zerver_stream(slack_data_dir, realm_id, added_users, @@ -84,6 +85,10 @@ def slack_workspace_to_realm(domain_name: str, realm_id: int, user_list: List[Ze # for documentation on zerver_defaultstream realm['zerver_userprofile'] = zerver_userprofile + # Custom profile fields + realm['zerver_customprofilefield'] = zerver_customprofilefield + realm['zerver_customprofilefield_value'] = zerver_customprofilefield_value + realm['zerver_defaultstream'] = channels_to_zerver_stream_fields[0] realm['zerver_stream'] = channels_to_zerver_stream_fields[1] realm['zerver_subscription'] = channels_to_zerver_stream_fields[3] @@ -130,23 +135,44 @@ def build_realmemoji(custom_emoji_list: ZerverFieldsT, def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT], realm_id: int, timestamp: Any, domain_name: str) -> Tuple[List[ZerverFieldsT], List[ZerverFieldsT], - AddedUsersT]: + AddedUsersT, + List[ZerverFieldsT], + List[ZerverFieldsT]]: """ Returns: 1. zerver_userprofile, which is a list of user profile 2. avatar_list, which is list to map avatars to zulip avatard records.json 3. added_users, which is a dictionary to map from slack user id to zulip user id + 4. zerver_customprofilefield, which is a list of all custom profile fields + 5. 
zerver_customprofilefield_values, which is a list of user profile fields """ logging.info('######### IMPORTING USERS STARTED #########\n') zerver_userprofile = [] + zerver_customprofilefield = [] # type: List[ZerverFieldsT] + zerver_customprofilefield_values = [] # type: List[ZerverFieldsT] avatar_list = [] # type: List[ZerverFieldsT] added_users = {} + # The user data we get from the slack api does not contain custom profile data + # Hence we get it from the slack zip file + slack_data_file_user_list = get_data_file(slack_data_dir + '/users.json') + + # To map user id with the custom profile fields of the corresponding user + slack_user_custom_field_map = {} + # To store custom fields corresponding to their ids + custom_field_map = {} # type: ZerverFieldsT + + for user in slack_data_file_user_list: + if 'fields' in user['profile']: + # Make sure the content of fields is not 'None' + if user['profile']['fields']: + slack_user_custom_field_map[user['id']] = user['profile']['fields'] + # We have only one primary owner in slack, see link # https://get.slack.help/hc/en-us/articles/201912948-Owners-and-Administrators # This is to import the primary owner first from all the users - user_id_count = 0 + user_id_count = custom_field_id_count = customprofilefield_id = 0 primary_owner_id = user_id_count user_id_count += 1 @@ -174,6 +200,17 @@ def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT], # timezone timezone = get_user_timezone(user) + # Check for custom profile fields + if slack_user_id in slack_user_custom_field_map: + # For processing the fields + custom_field_map, customprofilefield_id = build_customprofile_field( + zerver_customprofilefield, slack_user_custom_field_map[slack_user_id], + customprofilefield_id, realm_id, custom_field_map) + # Store the custom field values for the corresponding user + custom_field_id_count = build_customprofilefields_values( + custom_field_map, slack_user_custom_field_map[slack_user_id], user_id, + custom_field_id_count, zerver_customprofilefield_values) + userprofile = dict( enable_desktop_notifications=DESKTOP_NOTIFICATION, is_staff=False, # 'staff' is for server administrators, which don't exist in Slack. 
@@ -238,8 +275,54 @@ def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT], user_id_count += 1 logging.info(u"{} -> {}".format(user['name'], userprofile['email'])) + + process_customprofilefields(zerver_customprofilefield, zerver_customprofilefield_values) logging.info('######### IMPORTING USERS FINISHED #########\n') - return zerver_userprofile, avatar_list, added_users + return zerver_userprofile, avatar_list, added_users, zerver_customprofilefield, \ + zerver_customprofilefield_values + +def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields: ZerverFieldsT, + customprofilefield_id: int, realm_id: int, + custom_field_map: ZerverFieldsT) -> Tuple[ZerverFieldsT, int]: + # The name of the custom profile field is not provided in the slack data + # Hash keys of the fields are provided + # Reference: https://api.slack.com/methods/users.profile.set + for field, value in fields.items(): + if field not in custom_field_map: + field_name = ("slack custom field %s" % str(customprofilefield_id + 1)) + customprofilefield = dict( + id=customprofilefield_id, + realm=realm_id, + name=field_name, + field_type=1 # For now this is defaulted to 'SHORT_TEXT' + # Processing is done in the function 'process_customprofilefields' + ) + custom_field_map[field] = customprofilefield_id + customprofilefield_id += 1 + customprofile_field.append(customprofilefield) + return custom_field_map, customprofilefield_id + +def build_customprofilefields_values(custom_field_map: ZerverFieldsT, fields: ZerverFieldsT, + user_id: int, custom_field_id: int, + custom_field_values: List[ZerverFieldsT]) -> int: + for field, value in fields.items(): + custom_field_value = dict( + id=custom_field_id, + user_profile=user_id, + field=custom_field_map[field], + value=value['value']) + custom_field_values.append(custom_field_value) + custom_field_id += 1 + return custom_field_id + +def process_customprofilefields(customprofilefield: List[ZerverFieldsT], + customprofilefield_value: List[ZerverFieldsT]) -> None: + # Process the field types by checking all field values + for field in customprofilefield: + for field_value in customprofilefield_value: + if field_value['field'] == field['id'] and len(field_value['value']) > 50: + field['field_type'] = 2 # corresponding to Long Text + break def get_user_email(user: ZerverFieldsT, domain_name: str) -> str: if 'email' in user['profile']:
diff --git a/zerver/tests/test_slack_importer.py b/zerver/tests/test_slack_importer.py --- a/zerver/tests/test_slack_importer.py +++ b/zerver/tests/test_slack_importer.py @@ -145,13 +145,19 @@ def test_get_timezone(self) -> None: self.assertEqual(get_user_timezone(user_timezone_none), "America/New_York") self.assertEqual(get_user_timezone(user_no_timezone), "America/New_York") - def test_users_to_zerver_userprofile(self) -> None: + @mock.patch("zerver.lib.slack_data_to_zulip_data.get_data_file") + def test_users_to_zerver_userprofile(self, mock_get_data_file: mock.Mock) -> None: + custom_profile_field_user1 = {"Xf06054BBB": {"value": "random1"}, + "Xf023DSCdd": {"value": "employee"}} + custom_profile_field_user2 = {"Xf06054BBB": {"value": "random2"}, + "Xf023DSCdd": {"value": "employer"}} user_data = [{"id": "U08RGD1RD", "team_id": "T5YFFM2QY", "name": "john", "deleted": False, "real_name": "John Doe", - "profile": {"image_32": "", "email": "[email protected]", "avatar_hash": "hash"}}, + "profile": {"image_32": "", "email": "[email protected]", "avatar_hash": "hash", + "fields": custom_profile_field_user1}}, {"id": "U0CBK5KAT", "team_id": "T5YFFM2QY", "is_admin": True, @@ -162,6 +168,7 @@ def test_users_to_zerver_userprofile(self) -> None: "real_name": "Jane Doe", "deleted": False, "profile": {"image_32": "https:\/\/secure.gravatar.com\/avatar\/random.png", + "fields": custom_profile_field_user2, "email": "[email protected]", "avatar_hash": "hash"}}, {"id": "U09TYF5Sk", "team_id": "T5YFFM2QY", @@ -172,6 +179,7 @@ def test_users_to_zerver_userprofile(self) -> None: "profile": {"image_32": "https:\/\/secure.gravatar.com\/avatar\/random1.png", "email": "[email protected]", "avatar_hash": "hash"}}] + mock_get_data_file.return_value = user_data # As user with slack_id 'U0CBK5KAT' is the primary owner, that user should be imported first # and hence has zulip_id = 1 test_added_users = {'U08RGD1RD': 1, @@ -179,8 +187,19 @@ def test_users_to_zerver_userprofile(self) -> None: 'U09TYF5Sk': 2} slack_data_dir = './random_path' timestamp = int(timezone_now().timestamp()) - zerver_userprofile, avatar_list, added_users = users_to_zerver_userprofile( - slack_data_dir, user_data, 1, timestamp, 'test_domain') + mock_get_data_file.return_value = user_data + zerver_userprofile, avatar_list, added_users, customprofilefield, \ + customprofilefield_value = users_to_zerver_userprofile(slack_data_dir, user_data, 1, + timestamp, 'test_domain') + + # Test custom profile fields + self.assertEqual(customprofilefield[0]['field_type'], 1) + self.assertEqual(customprofilefield[1]['name'], 'slack custom field 2') + + self.assertEqual(len(customprofilefield_value), 4) + self.assertEqual(customprofilefield_value[0]['field'], 0) + self.assertEqual(customprofilefield_value[0]['user_profile'], 1) + self.assertEqual(customprofilefield_value[2]['user_profile'], 0) # test that the primary owner should always be imported first self.assertDictEqual(added_users, test_added_users) @@ -316,7 +335,7 @@ def test_channels_to_zerver_stream(self, mock_get_data_file: mock.Mock) -> None: @mock.patch("zerver.lib.slack_data_to_zulip_data.build_zerver_realm", return_value=[{}]) @mock.patch("zerver.lib.slack_data_to_zulip_data.users_to_zerver_userprofile", - return_value=[[], [], {}]) + return_value=[[], [], {}, [], []]) @mock.patch("zerver.lib.slack_data_to_zulip_data.channels_to_zerver_stream", return_value=[[], [], {}, [], [], {}]) def test_slack_workspace_to_realm(self, mock_channels_to_zerver_stream: mock.Mock,
slack import: Support importing custom profile fields We don't currently support custom profile fields. I think @rheaparekh is working on this, so pre-assigning it to her.
2018-04-09T12:25:11
zulip/zulip
9,272
zulip__zulip-9272
[ "9227" ]
0a7d1bc7460b57e54dc750783eb9f45705e12995
diff --git a/zerver/lib/bugdown/help_settings_links.py b/zerver/lib/bugdown/help_settings_links.py --- a/zerver/lib/bugdown/help_settings_links.py +++ b/zerver/lib/bugdown/help_settings_links.py @@ -35,8 +35,6 @@ 'deactivated-users-admin': ['Manage organization', 'Deactivated users', '/#organization/deactivated-users-admin'], 'bot-list-admin': ['Manage organization', 'Bots', '/#organization/bot-list-admin'], - 'streams-list-admin': ['Manage organization', 'Delete streams', - '/#organization/streams-list-admin'], 'default-streams-list': ['Manage organization', 'Default streams', '/#organization/default-streams-list'], 'filter-settings': ['Manage organization', 'Filter settings',
diff --git a/frontend_tests/node_tests/templates.js b/frontend_tests/node_tests/templates.js --- a/frontend_tests/node_tests/templates.js +++ b/frontend_tests/node_tests/templates.js @@ -329,8 +329,7 @@ function render(template_name, args) { }; var html = render('admin_tab', args); var admin_features = ["admin_users_table", "admin_bots_table", - "admin_streams_table", "admin_deactivated_users_table", - "admin_invites_table"]; + "admin_deactivated_users_table", "admin_invites_table"]; _.each(admin_features, function (admin_feature) { assert.notEqual($(html).find("#" + admin_feature).length, 0); });
Remove the "Delete streams" administrative tab Now that we have a "Delete stream" button in the main streams UI, which has lots more context on description/traffic/subscribers, this page is useless. We should just remove it. We should make sure to remove the documentation in /help/ linking to this as well.
Hello @zulip/server-settings members, this issue was labeled with the "area: settings (admin/org)" label, so you may want to check it out! <!-- areaLabelNotification --> @zulipbot claim
2018-04-30T12:19:30
zulip/zulip
9,292
zulip__zulip-9292
[ "9251" ]
a8830ec8da9fede8910d90a09861dcd8114580f6
diff --git a/zerver/lib/push_notifications.py b/zerver/lib/push_notifications.py --- a/zerver/lib/push_notifications.py +++ b/zerver/lib/push_notifications.py @@ -445,13 +445,24 @@ def get_text(elem: LH.HtmlElement) -> Text: # Handles realm emojis, avatars etc. if elem.tag == "img": return elem.get("alt", "") + if elem.tag == 'blockquote': + return '' # To avoid empty line before quote text + return elem.text or '' - return elem.text or "" + def format_as_quote(quote_text: Text) -> Text: + quote_text_list = filter(None, quote_text.split('\n')) # Remove empty lines + quote_text = '\n'.join(map(lambda x: "> "+x, quote_text_list)) + quote_text += '\n' + return quote_text def process(elem: LH.HtmlElement) -> Text: plain_text = get_text(elem) + sub_text = '' for child in elem: - plain_text += process(child) + sub_text += process(child) + if elem.tag == 'blockquote': + sub_text = format_as_quote(sub_text) + plain_text += sub_text plain_text += elem.tail or "" return plain_text
diff --git a/zerver/tests/fixtures/markdown_test_cases.json b/zerver/tests/fixtures/markdown_test_cases.json --- a/zerver/tests/fixtures/markdown_test_cases.json +++ b/zerver/tests/fixtures/markdown_test_cases.json @@ -52,20 +52,20 @@ "name": "fenced_quote", "input": "Hamlet said:\n~~~ quote\nTo be or **not** to be.\n\nThat is the question\n~~~", "expected_output": "<p>Hamlet said:</p>\n<blockquote>\n<p>To be or <strong>not</strong> to be.</p>\n<p>That is the question</p>\n</blockquote>", - "text_content": "Hamlet said:\n\nTo be or not to be.\nThat is the question\n" + "text_content": "Hamlet said:\n> To be or not to be.\n> That is the question\n" }, { "name": "fenced_nested_quote", "input": "Hamlet said:\n~~~ quote\nPolonius said:\n> This above all: to thine ownself be true,\nAnd it must follow, as the night the day,\nThou canst not then be false to any man.\n\nWhat good advice!\n~~~", "expected_output": "<p>Hamlet said:</p>\n<blockquote>\n<p>Polonius said:</p>\n<blockquote>\n<p>This above all: to thine ownself be true,<br>\nAnd it must follow, as the night the day,<br>\nThou canst not then be false to any man.</p>\n</blockquote>\n<p>What good advice!</p>\n</blockquote>", - "text_content": "Hamlet said:\n\nPolonius said:\n\nThis above all: to thine ownself be true,\nAnd it must follow, as the night the day,\nThou canst not then be false to any man.\n\nWhat good advice!\n" + "text_content": "Hamlet said:\n> Polonius said:\n> > This above all: to thine ownself be true,\n> > And it must follow, as the night the day,\n> > Thou canst not then be false to any man.\n> What good advice!\n" }, { "name": "complexly_nested_quote", "input": "I heard about this second hand...\n~~~ quote\n\nHe said:\n~~~ quote\nThe customer is complaining.\n\nThey looked at this code:\n``` \ndef hello(): print 'hello\n```\nThey would prefer:\n~~~\ndef hello()\n puts 'hello'\nend\n~~~\n\nPlease advise.\n~~~\n\nShe said:\n~~~ quote\nJust send them this:\n```\necho \"hello\n\"\n```\n~~~", "expected_output": "<p>I heard about this second hand...</p>\n<blockquote>\n<p>He said:</p>\n<blockquote>\n<p>The customer is complaining.</p>\n<p>They looked at this code:</p>\n<div class=\"codehilite\"><pre><span></span>def hello(): print &#39;hello\n</pre></div>\n\n\n<p>They would prefer:</p>\n</blockquote>\n<p>def hello()<br>\n puts 'hello'<br>\nend</p>\n</blockquote>\n<p>Please advise.</p>\n<div class=\"codehilite\"><pre><span></span>She said:\n~~~ quote\nJust send them this:\n```\necho &quot;hello\n&quot;\n```\n</pre></div>", "marked_expected_output": "<p>I heard about this second hand...</p>\n<blockquote>\n<p>He said:</p>\n<blockquote>\n<p>The customer is complaining.</p>\n<p>They looked at this code:</p>\n<div class=\"codehilite\"><pre><span></span>def hello(): print &#39;hello\n</pre></div>\n\n\n<p>They would prefer:</p>\n</blockquote>\n<p>def hello()<br>\n puts &#39;hello&#39;<br>\nend</p>\n</blockquote>\n<p>Please advise.</p>\n<div class=\"codehilite\"><pre><span></span>\nShe said:\n~~~ quote\nJust send them this:\n```\necho &quot;hello\n&quot;\n```\n</pre></div>", - "text_content": "I heard about this second hand...\n\nHe said:\n\nThe customer is complaining.\nThey looked at this code:\ndef hello(): print 'hello\n\n\n\nThey would prefer:\n\ndef hello()\n puts 'hello'\nend\n\nPlease advise.\nShe said:\n~~~ quote\nJust send them this:\n```\necho \"hello\n\"\n```\n" + "text_content": "I heard about this second hand...\n> He said:\n> > The customer is complaining.\n> > They looked at this code:\n> > def hello(): print 'hello\n> > 
They would prefer:\n> def hello()\n> puts 'hello'\n> end\n\nPlease advise.\nShe said:\n~~~ quote\nJust send them this:\n```\necho \"hello\n\"\n```\n" }, { "name": "fenced_quotes_inside_mathblock", @@ -92,7 +92,7 @@ "name": "fenced_quote_with_hashtag", "input": "```quote\n# line 1\n# line 2\n```", "expected_output": "<blockquote>\n<p># line 1<br>\n# line 2</p>\n</blockquote>", - "text_content": "\n# line 1\n# line 2\n" + "text_content": "> # line 1\n> # line 2\n" }, { "name": "dangerous_block", @@ -285,7 +285,7 @@ "input": ">Google logo today:\n>https://www.google.com/images/srpr/logo4w.png\n>Kinda boring", "expected_output": "<blockquote>\n<p>Google logo today:<br>\n<a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\">https://www.google.com/images/srpr/logo4w.png</a><br>\nKinda boring</p>\n<div class=\"message_inline_image\"><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\"><img src=\"https://www.google.com/images/srpr/logo4w.png\"></a></div></blockquote>", "backend_only_rendering": true, - "text_content": "\nGoogle logo today:\nhttps:\/\/www.google.com\/images\/srpr\/logo4w.png\nKinda boring\n" + "text_content": "> Google logo today:\n> https:\/\/www.google.com\/images\/srpr\/logo4w.png\n> Kinda boring\n" }, { "name": "two_inline_images",
Make our `text_output` markdown integration do something reasonable with block quotes Right now, if a user sends a block-quoted message to another user and it ends up in a push notification, one can end up with what would read in the webapp as ``` > something your reply ``` looking like "something\n your reply" in the `text_output`, which is super confusing, since it makes it look like you said "something", when actually you're replying to it. What we should do is translate the HTML `<blockquote>something</blockquote>` tag that we generate in Zulip into a `> something\n`, instead of "something". Or if we can't get that working in a way that feels good because of issues around newlines, just hide the `<blockquote>` content entirely. (This is the correct way to fix https://github.com/zulip/zulip-mobile/issues/2419). @HarshitOnGitHub this seems like a good quick issue for you, given your expertise with that feature :).
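What that translation could look like: a minimal, self-contained sketch in the spirit of the lxml walker in the diff above. It assumes lxml is installed, and the function names are illustrative rather than Zulip's actual API.

```python
import lxml.html

def html_to_text(html: str) -> str:
    def render(elem) -> str:
        # For blockquotes, collect the body first and re-emit it as "> "-prefixed lines.
        text = '' if elem.tag == 'blockquote' else (elem.text or '')
        for child in elem:
            text += render(child)
        if elem.tag == 'blockquote':
            lines = [line for line in text.split('\n') if line]
            text = '\n'.join('> ' + line for line in lines) + '\n'
        return text + (elem.tail or '')

    return render(lxml.html.fromstring(html))

print(html_to_text('<div><blockquote><p>something</p></blockquote><p>your reply</p></div>'))
# Output:
# > something
# your reply
```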
@zulipbot claim
2018-05-02T05:13:04
zulip/zulip
9,315
zulip__zulip-9315
[ "9240" ]
7cbff8b521c90ce0df2bfadf3552c7c778bee35f
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -83,7 +83,7 @@ get_display_recipient_by_id, query_for_ids, get_huddle_recipient, \ UserGroup, UserGroupMembership, get_default_stream_groups, \ get_bot_services, get_bot_dicts_in_realm, DomainNotAllowedForRealmError, \ - get_services_for_bots + get_services_for_bots, DisposableEmailError from zerver.lib.alert_words import alert_words_in_realm from zerver.lib.avatar import avatar_url, avatar_url_from_dict @@ -4158,6 +4158,8 @@ def validate_email(user_profile: UserProfile, email: Text) -> Tuple[Optional[str email_allowed_for_realm(email, user_profile.realm) except DomainNotAllowedForRealmError: return _("Outside your domain."), None + except DisposableEmailError: + return _("Please use your real email address."), None try: validate_email_for_realm(user_profile.realm, email)
diff --git a/zerver/tests/test_settings.py b/zerver/tests/test_settings.py --- a/zerver/tests/test_settings.py +++ b/zerver/tests/test_settings.py @@ -113,6 +113,18 @@ def test_illegal_characters_in_name_changes(self) -> None: dict(full_name='Opheli*')) self.assert_json_error(json_result, 'Invalid characters in name!') + def test_change_email_to_disposable_email(self) -> None: + email = self.example_email("hamlet") + self.login(email) + realm = get_realm("zulip") + realm.disallow_disposable_email_addresses = True + realm.restricted_to_domain = False + realm.save() + + json_result = self.client_patch("/json/settings", + dict(email='[email protected]')) + self.assert_json_error(json_result, 'Please use your real email address.') + # This is basically a don't-explode test. def test_notify_settings(self) -> None: for notification_setting in UserProfile.notification_setting_types: diff --git a/zerver/tests/test_signup.py b/zerver/tests/test_signup.py --- a/zerver/tests/test_signup.py +++ b/zerver/tests/test_signup.py @@ -772,6 +772,23 @@ def test_invite_outside_domain_in_closed_realm(self) -> None: self.invite(external_address, ["Denmark"]), "Some emails did not validate, so we didn't send any invitations.") + def test_invite_using_disposable_email(self) -> None: + """ + In a realm with `restricted_to_domain = True`, you can't invite people + with a different domain from that of the realm or your e-mail address. + """ + zulip_realm = get_realm("zulip") + zulip_realm.restricted_to_domain = False + zulip_realm.disallow_disposable_email_addresses = True + zulip_realm.save() + + self.login(self.example_email("hamlet")) + external_address = "[email protected]" + + self.assert_json_error( + self.invite(external_address, ["Denmark"]), + "Some emails did not validate, so we didn't send any invitations.") + def test_invite_outside_domain_in_open_realm(self) -> None: """ In a realm with `restricted_to_domain = False`, you can invite people
Fix 500 when trying to change your email to a disposable email This validation error should be being reformatted as a JsonableError for the "email change" UI: ``` File "./zerver/decorator.py", line 605, in _wrapped_view_func return authenticate_log_and_execute_json(request, view_func, *args, **kwargs) File "./zerver/decorator.py", line 587, in authenticate_log_and_execute_json return rate_limit()(view_func)(request, user_profile, *args, **kwargs) File "./zerver/decorator.py", line 747, in wrapped_func return func(request, *args, **kwargs) File "./zerver/decorator.py", line 423, in _wrapped_view_func return view_func(request, *args, **kwargs) File "./zerver/lib/request.py", line 174, in _wrapped_view_func return view_func(request, *args, **kwargs) File "./zerver/views/user_settings.py", line 105, in json_change_settings error, skipped = validate_email(user_profile, new_email) File "./zerver/lib/actions.py", line 4113, in validate_email email_allowed_for_realm(email, user_profile.realm) File "./zerver/models.py", line 398, in email_allowed_for_realm raise DisposableEmailError zerver.models.DisposableEmailError ``` Tagging as a priority since this is part of our goal to get back to 0 known 500 errors in Zulip. @hackerkid this is probably a good task for you.
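The shape of the fix, mirrored by the diff above, is to catch the validation exception and hand back a normal error string instead of letting it escape as a 500. A self-contained sketch of that pattern; the blocklist and helper names below are stand-ins, not Zulip's real ones:

```python
class DisposableEmailError(Exception):
    pass

DISPOSABLE_DOMAINS = {"mailinator.com"}  # stand-in for a real disposable-domain list

def check_email_allowed(email: str) -> None:
    if email.rsplit("@", 1)[-1].lower() in DISPOSABLE_DOMAINS:
        raise DisposableEmailError

def validate_new_email(email: str) -> str:
    """Return an error message for the settings UI instead of raising."""
    try:
        check_email_allowed(email)
    except DisposableEmailError:
        return "Please use your real email address."
    return ""

print(validate_new_email("[email protected]"))  # Please use your real email address.
```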
Hello @zulip/server-settings members, this issue was labeled with the "area: settings (user)" label, so you may want to check it out! <!-- areaLabelNotification --> @zulipbot claim
2018-05-04T16:48:34
zulip/zulip
9,345
zulip__zulip-9345
[ "9327" ]
01a7ed952d5943f12be080f1e7da1a5ceacdbcee
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -98,7 +98,6 @@ from confirmation.models import Confirmation, create_confirmation_link from confirmation import settings as confirmation_settings -from six import unichr from zerver.lib.bulk_create import bulk_create_users from zerver.lib.create_user import random_api_key @@ -3915,7 +3914,7 @@ def decode_email_address(email: Text) -> Optional[Tuple[Text, Text]]: encoded_stream_name, token = msg_string.split('.') else: encoded_stream_name, token = msg_string.split('+') - stream_name = re.sub("%\d{4}", lambda x: unichr(int(x.group(0)[1:])), encoded_stream_name) + stream_name = re.sub("%\d{4}", lambda x: chr(int(x.group(0)[1:])), encoded_stream_name) return stream_name, token SubHelperT = Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[Dict[str, Any]]] diff --git a/zerver/views/email_log.py b/zerver/views/email_log.py --- a/zerver/views/email_log.py +++ b/zerver/views/email_log.py @@ -12,7 +12,7 @@ get_forward_address, set_forward_address, ) -from six.moves import urllib +import urllib from confirmation.models import Confirmation, confirmation_url import os diff --git a/zproject/email_backends.py b/zproject/email_backends.py --- a/zproject/email_backends.py +++ b/zproject/email_backends.py @@ -1,7 +1,7 @@ import logging from typing import List -from six.moves import configparser +import configparser import smtplib from email.mime.multipart import MIMEMultipart
Migrate away from `six` to pure Python 3 APIs We still have a couple dozen uses of the [six](https://pythonhosted.org/six/) library in our codebase -- see `git grep six`. This library exists to help write code that works on both Python 2 and Python 3; now that we're on Python 3 only, we can make things cleaner by replacing each of these with the Python 3 form directly. This is sort of a companion to #9203.
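For reference, the replacements are mostly mechanical. A short sketch of the typical before/after pairs (the actual call sites are whatever `git grep six` turns up):

```python
# Before (Python 2/3 compatibility layer):
#     from six.moves import configparser
#     from six.moves import urllib
#     from six import unichr
#
# After (plain Python 3):
import configparser
import urllib.parse

config = configparser.ConfigParser()
print(urllib.parse.urlencode({"q": "zulip"}))
print(chr(0x1F603))  # chr() replaces six's unichr()
```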
Hello @zulip/server-tooling members, this issue was labeled with the "area: tooling" label, so you may want to check it out! <!-- areaLabelNotification --> Just a quick note: We have a few pieces of code under `tools/` like `tools/linter_lib` and `tools/lister.py` that we are likely to want to keep Python 2+3 compatible, since the plan includes extracting them to a separate project eventually. @zulipbot claim Hello @badmon! Thanks for your interest in Zulip! You have attempted to claim an issue without the labels "help wanted", "good first issue". Since you're a new contributor, you can only claim and submit pull requests for issues with the [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+no%3Aassignee+label%3A%22help+wanted%22) or [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+no%3Aassignee+label%3A%22good+first+issue%22) labels. If this is your first time here, we recommend reading our [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html) before getting started.
2018-05-09T03:22:58
zulip/zulip
9,650
zulip__zulip-9650
[ "9649" ]
182215d125dfc09ef6aed94ab8a8cc3b0f88996b
diff --git a/tools/linter_lib/custom_check.py b/tools/linter_lib/custom_check.py --- a/tools/linter_lib/custom_check.py +++ b/tools/linter_lib/custom_check.py @@ -188,6 +188,9 @@ def build_custom_checkers(by_lang): 'description': 'Do not concatenate i18n strings'}, {'pattern': '\+.*i18n\.t\(.+\)', 'description': 'Do not concatenate i18n strings'}, + {'pattern': '[.]includes[(]', + 'exclude': ['frontend_tests/'], + 'description': '.includes() is incompatible with Internet Explorer. Use .indexOf() !== -1 instead.'}, {'pattern': '[.]html[(]', 'exclude_pattern': '[.]html[(]("|\'|templates|html|message.content|sub.rendered_description|i18n.t|rendered_|$|[)]|error_text|widget_elem|[$]error|[$][(]"<p>"[)])', 'exclude': ['static/js/portico', 'static/js/lightbox.js', 'static/js/ui_report.js',
browser-support: .includes() not supported in Internet Explorer. Replace occurrences of `.includes()` with `.indexOf() !== -1` wherever possible; that should cover most of the cases. There are 6 occurrences of includes in the code excluding the front-end tests, so replacing the occurrences would be a better idea than adding a polyfill. References: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/includes https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/includes
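One way to keep new occurrences out is a custom lint rule of the kind `tools/linter_lib/custom_check.py` already uses. A rough, self-contained sketch of that rule shape; the surrounding lint framework is assumed:

```python
import re

RULE = {
    'pattern': r'[.]includes[(]',
    'exclude': ['frontend_tests/'],
    'description': '.includes() is incompatible with Internet Explorer. '
                   'Use .indexOf() !== -1 instead.',
}

line = 'if (stream_names.includes(name)) {'
if re.search(RULE['pattern'], line):
    print(RULE['description'])
```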
@zulipbot claim
2018-06-03T20:03:19
zulip/zulip
9,719
zulip__zulip-9719
[ "9611" ]
974f015837a002b9891b9c8f989d0860bfd8258e
diff --git a/analytics/management/commands/stream_stats.py b/analytics/management/commands/stream_stats.py --- a/analytics/management/commands/stream_stats.py +++ b/analytics/management/commands/stream_stats.py @@ -40,5 +40,5 @@ def handle(self, *args: Any, **options: str) -> None: active=True)),), end=' ') num_messages = len(Message.objects.filter(recipient=recipient)) print("%12d" % (num_messages,)) - print("%d invite-only streams" % (invite_only_count,)) + print("%d private streams" % (invite_only_count,)) print("")
diff --git a/docs/testing/manual-testing.md b/docs/testing/manual-testing.md --- a/docs/testing/manual-testing.md +++ b/docs/testing/manual-testing.md @@ -287,7 +287,7 @@ First, we start off with "positive" tests. - Have Cordelia subscribe to the stream. - Verify Cordelia can see the previous message. - Have Cordelia post a message to the stream. - - Have Hamlet create an invite-only stream with Cordelia + - Have Hamlet create a private stream with Cordelia invited and test a two-way conversation between the two users. diff --git a/frontend_tests/node_tests/templates.js b/frontend_tests/node_tests/templates.js --- a/frontend_tests/node_tests/templates.js +++ b/frontend_tests/node_tests/templates.js @@ -1240,7 +1240,7 @@ run_test('subscription_settings', () => { html += render('subscription_settings', sub); var div = $(html).find(".subscription-type"); - assert(div.text().indexOf('invite-only stream') > 0); + assert(div.text().indexOf('private stream') > 0); var anchor = $(html).find(".change-stream-privacy:first"); assert.equal(anchor.text(), "[translated: Change]");
Change "invite-only" wording to "private" ![screenshot from 2018-05-31 16-47-49](https://user-images.githubusercontent.com/549661/40807927-9f721af0-64f3-11e8-9a12-2de8aa2ee678.png) We should be referring to private streams as "private streams" everywhere that isn't a code-level identifier. (see https://chat.zulip.org/#narrow/stream/19-documentation/subject/private.20versus.20invite-only/near/586071)
These can read: **public stream** **private stream** with **shared history** **private stream** with **protected history** for the three cases. Hello @zulip/server-user-docs members, this issue was labeled with the "area: documentation (user)" label, so you may want to check it out! <!-- areaLabelNotification --> I'd love for this to happen before the 1.9.0 release, so our terminology is consistent in the released version. @eeshangarg can you take this?
2018-06-08T20:06:39
zulip/zulip
9,764
zulip__zulip-9764
[ "9763" ]
25c46e3abbdc76cddd42bbc8bcceb528cdaa9354
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1560,7 +1560,7 @@ def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> N 'tex', Tex(r'\B(?<!\$)\$\$(?P<body>[^\n_$](\\\$|[^$\n])*)\$\$(?!\$)\B'), '>backtick') - md.inlinePatterns.add('emoji', Emoji(EMOJI_REGEX), '_end') + md.inlinePatterns.add('emoji', Emoji(EMOJI_REGEX), '<nl') md.inlinePatterns.add('translate_emoticons', EmoticonTranslation(emoticon_regex), '>emoji') md.inlinePatterns.add('unicodeemoji', UnicodeEmoji(unicode_emoji_regex), '_end') md.inlinePatterns.add('link', AtomicLinkPattern(markdown.inlinepatterns.LINK_RE, md), '>avatar')
diff --git a/zerver/tests/fixtures/markdown_test_cases.json b/zerver/tests/fixtures/markdown_test_cases.json --- a/zerver/tests/fixtures/markdown_test_cases.json +++ b/zerver/tests/fixtures/markdown_test_cases.json @@ -384,6 +384,13 @@ "text_content": "a:) ;)b", "translate_emoticons": true }, + { + "name": "translate_emoticons_newline", + "input": ":) test\n:) test", + "expected_output": "<p><span class=\"emoji emoji-1f603\" title=\"smiley\">:smiley:</span> test<br>\n<span class=\"emoji emoji-1f603\" title=\"smiley\">:smiley:</span> test</p>", + "text_content": "\ud83d\ude03 test\n\ud83d\ude03 test", + "translate_emoticons": true + }, { "name": "translate_emoticons_in_code", "input": "`:)`",
Fix a bug in emoticon translation where newlines prevent translation. Currently, there is a bug in emoticon translation where a newline before an emoticon prevents it from being translated. This only occurs server-side, as the message initially is translated correctly in the local echo, but then becomes incorrect after receiving the render from the server. For instance, > :) test :) test is translated into > 😃 test 😃 test but > :) test > :) test is incorrectly translated into > 😃 test > :) test due to the newline. See [discussion](https://chat.zulip.org/#narrow/stream/9-issues/subject/display-setting.3A.20Translate.20emoticons/near/599540).
2018-06-18T03:42:34
zulip/zulip
9,825
zulip__zulip-9825
[ "9822" ]
c0751c0644a1f3ee2865b6ad743ce538a06ad321
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -8,4 +8,4 @@ # Typically, adding a dependency only requires a minor version bump, and # removing a dependency requires a major version bump. -PROVISION_VERSION = '20.11' +PROVISION_VERSION = '21.0'
diff --git a/frontend_tests/node_tests/common.js b/frontend_tests/node_tests/common.js --- a/frontend_tests/node_tests/common.js +++ b/frontend_tests/node_tests/common.js @@ -6,3 +6,13 @@ run_test('basics', () => { common.autofocus('#home'); assert($('#home').is_focused()); }); + +run_test('phrase_match', () => { + assert(common.phrase_match('tes', 'test')); + assert(common.phrase_match('Tes', 'test')); + assert(common.phrase_match('Tes', 'Test')); + assert(common.phrase_match('tes', 'Stream Test')); + + assert(!common.phrase_match('tests', 'test')); + assert(!common.phrase_match('tes', 'hostess')); +}); diff --git a/frontend_tests/node_tests/search_suggestion.js b/frontend_tests/node_tests/search_suggestion.js --- a/frontend_tests/node_tests/search_suggestion.js +++ b/frontend_tests/node_tests/search_suggestion.js @@ -7,6 +7,7 @@ zrequire('stream_data'); zrequire('topic_data'); zrequire('people'); zrequire('unread'); +zrequire('common'); var search = zrequire('search_suggestion'); var bob = {
integrations: Match phrase instead of fuzzysearch From https://chat.zulip.org/#narrow/stream/9-issues/topic/integrations.20search
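The real implementation is JavaScript in `static/js/common.js`, but the matching idea the node test above exercises is easy to sketch in Python: the query must be a prefix of the whole phrase or of one of its words, rather than a fuzzy substring match.

```python
def phrase_match(query: str, phrase: str) -> bool:
    q = query.lower()
    phrase = phrase.lower()
    if phrase.startswith(q):
        return True
    return any(word.startswith(q) for word in phrase.split())

assert phrase_match('tes', 'Stream Test')   # word-prefix match
assert not phrase_match('tes', 'hostess')   # substring only, so no match
assert not phrase_match('tests', 'test')
```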
2018-06-25T16:08:20
zulip/zulip
9,837
zulip__zulip-9837
[ "9611" ]
a2a695dfa7a3fbd9d406dcce9c6299e41c6a445d
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -2253,7 +2253,7 @@ def validate_user_access_to_subscribers_helper(user_profile: Optional[UserProfil return if (stream_dict["invite_only"] and not check_user_subscribed()): - raise JsonableError(_("Unable to retrieve subscribers for invite-only stream")) + raise JsonableError(_("Unable to retrieve subscribers for private stream")) def bulk_get_subscriber_user_ids(stream_dicts: Iterable[Mapping[str, Any]], user_profile: UserProfile, diff --git a/zerver/views/streams.py b/zerver/views/streams.py --- a/zerver/views/streams.py +++ b/zerver/views/streams.py @@ -308,7 +308,7 @@ def add_subscriptions_backend( if len(principals) > 0: if user_profile.realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams): - return json_error(_("You can only invite other Zephyr mirroring users to invite-only streams.")) + return json_error(_("You can only invite other Zephyr mirroring users to private streams.")) subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals) else: subscribers = set([user_profile])
diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py --- a/zerver/tests/test_subs.py +++ b/zerver/tests/test_subs.py @@ -2770,9 +2770,9 @@ def test_list_respects_invite_only_bit(self) -> None: self.assertIn("subscriptions", result.json()) for sub in result.json()["subscriptions"]: if sub['name'] == "Normandy": - self.assertEqual(sub['invite_only'], False, "Normandy was mistakenly marked invite-only") + self.assertEqual(sub['invite_only'], False, "Normandy was mistakenly marked private") if sub['name'] == "Saxony": - self.assertEqual(sub['invite_only'], True, "Saxony was not properly marked invite-only") + self.assertEqual(sub['invite_only'], True, "Saxony was not properly marked private") @slow("lots of queries") def test_inviteonly(self) -> None:
Change "invite-only" wording to "private" ![screenshot from 2018-05-31 16-47-49](https://user-images.githubusercontent.com/549661/40807927-9f721af0-64f3-11e8-9a12-2de8aa2ee678.png) We should be referring to private streams as "private streams" everywhere that isn't a code-level identifier. (see https://chat.zulip.org/#narrow/stream/19-documentation/subject/private.20versus.20invite-only/near/586071)
These can read: **public stream** **private stream** with **shared history** **private stream** with **protected history** for the three cases. Hello @zulip/server-user-docs members, this issue was labeled with the "area: documentation (user)" label, so you may want to check it out! <!-- areaLabelNotification --> I'd love for this to happen before the 1.9.0 release, so our terminology is consistent in the released version. @eeshangarg can you take this? Thanks @eeshangarg! Re-opening, since I think there may be a few more, and it would be good to go through and also correct the few places where the content is now incorrect (e.g. what does the API now return for the privacy of a stream?) I found a few more, e.g. in zerver/views/streams.py, with the following command `git grep -Ini "invite[^_]only" -- ':(exclude)static/locale/' .;` I believe the scope of this ticket is only user-facing docs? Changing what we return in the API is a non-trivial deprecation problem. yeah, changing the API should not be a part of this. I assumed the API had been changed (and just the docs hadn't), but it looks like that isn't the case. In any case, the ones in zerver/views/streams.py are error and/or success messages, which I think we should update as a part of this.
2018-06-26T22:05:41
zulip/zulip
9,951
zulip__zulip-9951
[ "9913" ]
811ac718d631a993b02c773b9f2550099f01428b
diff --git a/zerver/lib/webhooks/git.py b/zerver/lib/webhooks/git.py --- a/zerver/lib/webhooks/git.py +++ b/zerver/lib/webhooks/git.py @@ -34,6 +34,7 @@ REMOVE_BRANCH_MESSAGE_TEMPLATE = "{user_name} deleted branch {branch_name}" PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE = "{user_name} {action} [{type}{id}]({url})" +PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE_WITH_TITLE = "{user_name} {action} [{type}{id} {title}]({url})" PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE = "(assigned to {assignee})" PULL_REQUEST_BRANCH_INFO_TEMPLATE = "\nfrom `{target}` to `{base}`" @@ -123,14 +124,21 @@ def get_remove_branch_event_message(user_name: str, branch_name: str) -> str: def get_pull_request_event_message(user_name: str, action: str, url: str, number: Optional[int]=None, target_branch: Optional[str]=None, base_branch: Optional[str]=None, message: Optional[str]=None, assignee: Optional[str]=None, - type: Optional[str]='PR') -> str: - main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE.format( - user_name=user_name, - action=action, - type=type, - url=url, - id=" #{}".format(number) if number is not None else '' - ) + type: Optional[str]='PR', title: Optional[str]=None) -> str: + kwargs = { + 'user_name': user_name, + 'action': action, + 'type': type, + 'url': url, + 'id': ' #{}'.format(number) if number is not None else '', + 'title': title, + } + + if title is not None: + main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE_WITH_TITLE.format(**kwargs) + else: + main_message = PULL_REQUEST_OR_ISSUE_MESSAGE_TEMPLATE.format(**kwargs) + if assignee: main_message += PULL_REQUEST_OR_ISSUE_ASSIGNEE_INFO_TEMPLATE.format(assignee=assignee) @@ -154,7 +162,8 @@ def get_issue_event_message(user_name: str, url: str, number: Optional[int]=None, message: Optional[str]=None, - assignee: Optional[str]=None) -> str: + assignee: Optional[str]=None, + title: Optional[str]=None) -> str: return get_pull_request_event_message( user_name, action, @@ -162,7 +171,8 @@ def get_issue_event_message(user_name: str, number, message=message, assignee=assignee, - type='Issue' + type='Issue', + title=title, ) def get_push_tag_event_message(user_name: str, diff --git a/zerver/webhooks/gitlab/view.py b/zerver/webhooks/gitlab/view.py --- a/zerver/webhooks/gitlab/view.py +++ b/zerver/webhooks/gitlab/view.py @@ -71,7 +71,8 @@ def get_issue_created_event_body(payload: Dict[str, Any]) -> str: get_object_url(payload), payload['object_attributes'].get('iid'), description, - get_objects_assignee(payload) + get_objects_assignee(payload), + title=payload['object_attributes'].get('title') ) def get_issue_event_body(payload: Dict[str, Any], action: str) -> str: @@ -80,6 +81,7 @@ def get_issue_event_body(payload: Dict[str, Any], action: str) -> str: action, get_object_url(payload), payload['object_attributes'].get('iid'), + title=payload['object_attributes'].get('title') ) def get_merge_request_updated_event_body(payload: Dict[str, Any]) -> str:
diff --git a/zerver/webhooks/gitlab/tests.py b/zerver/webhooks/gitlab/tests.py --- a/zerver/webhooks/gitlab/tests.py +++ b/zerver/webhooks/gitlab/tests.py @@ -93,7 +93,7 @@ def test_remove_tag_event_message(self) -> None: def test_create_issue_without_assignee_event_message(self) -> None: expected_subject = u"my-awesome-project / Issue #1 Issue title" - expected_message = u"Tomasz Kolek created [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)\n\n~~~ quote\nIssue description\n~~~" + expected_message = u"Tomasz Kolek created [Issue #1 Issue title](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)\n\n~~~ quote\nIssue description\n~~~" self.send_and_test_stream_message( 'issue_created_without_assignee', @@ -104,7 +104,7 @@ def test_create_issue_without_assignee_event_message(self) -> None: def test_create_issue_with_assignee_event_message(self) -> None: expected_subject = u"my-awesome-project / Issue #1 Issue title" - expected_message = u"Tomasz Kolek created [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)(assigned to Tomasz Kolek)\n\n~~~ quote\nIssue description\n~~~" + expected_message = u"Tomasz Kolek created [Issue #1 Issue title](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)(assigned to Tomasz Kolek)\n\n~~~ quote\nIssue description\n~~~" self.send_and_test_stream_message( 'issue_created_with_assignee', @@ -115,7 +115,7 @@ def test_create_issue_with_assignee_event_message(self) -> None: def test_create_issue_with_hidden_comment_in_description(self) -> None: expected_subject = u"public-repo / Issue #3 New Issue with hidden comment" - expected_message = u"Eeshan Garg created [Issue #3](https://gitlab.com/eeshangarg/public-repo/issues/3)\n\n~~~ quote\nThis description actually has a hidden comment in it!\n~~~" + expected_message = u"Eeshan Garg created [Issue #3 New Issue with hidden comment](https://gitlab.com/eeshangarg/public-repo/issues/3)\n\n~~~ quote\nThis description actually has a hidden comment in it!\n~~~" self.send_and_test_stream_message( 'issue_created_with_hidden_comment_in_description', @@ -126,7 +126,7 @@ def test_create_issue_with_hidden_comment_in_description(self) -> None: def test_create_issue_with_null_description(self) -> None: expected_subject = u"my-awesome-project / Issue #7 Issue without description" - expected_message = u"Eeshan Garg created [Issue #7](https://gitlab.com/eeshangarg/my-awesome-project/issues/7)" + expected_message = u"Eeshan Garg created [Issue #7 Issue without description](https://gitlab.com/eeshangarg/my-awesome-project/issues/7)" self.send_and_test_stream_message( 'issue_opened_with_null_description', expected_subject, @@ -136,7 +136,7 @@ def test_create_issue_with_null_description(self) -> None: def test_update_issue_event_message(self) -> None: expected_subject = u"my-awesome-project / Issue #1 Issue title_new" - expected_message = u"Tomasz Kolek updated [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)" + expected_message = u"Tomasz Kolek updated [Issue #1 Issue title_new](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)" self.send_and_test_stream_message( 'issue_updated', @@ -147,7 +147,7 @@ def test_update_issue_event_message(self) -> None: def test_close_issue_event_message(self) -> None: expected_subject = u"my-awesome-project / Issue #1 Issue title_new" - expected_message = u"Tomasz Kolek closed [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)" + expected_message = u"Tomasz Kolek closed [Issue #1 Issue 
title_new](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)" self.send_and_test_stream_message( 'issue_closed', @@ -158,7 +158,7 @@ def test_close_issue_event_message(self) -> None: def test_reopen_issue_event_message(self) -> None: expected_subject = u"my-awesome-project / Issue #1 Issue title_new" - expected_message = u"Tomasz Kolek reopened [Issue #1](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)" + expected_message = u"Tomasz Kolek reopened [Issue #1 Issue title_new](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)" self.send_and_test_stream_message( 'issue_reopened',
[Gitlab Integration] Add title to issue create message The integration works really well, but right now whenever an issue is created the Bot writes only "(User) created Issue#" followed by the description... What I'm missing is the issue title before or even instead of the description
Hello @zulip/server-integrations members, this issue was labeled with the "area: integrations" label, so you may want to check it out! <!-- areaLabelNotification --> @eeshangarg can you look into this? I wonder whether what's happening is they're using the "single topic" feature and the issue title was in the topic being overwritten... (I guess it's possible we never did this correctly for GitLab; I'd originally misread the report as being for the GitHub integration). Similar thing happening when closing the issue, but there it’s maybe debatable if you’d want to see the title. I personally would say it makes sense in the form of “(user) closed issue (title)”
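A small sketch of the message-building change being asked for: include the title inside the linked text whenever it is available. The two template strings mirror the ones in the diff above; everything else is illustrative.

```python
MESSAGE = "{user_name} {action} [{type}{id}]({url})"
MESSAGE_WITH_TITLE = "{user_name} {action} [{type}{id} {title}]({url})"

def issue_event_message(user_name, action, url, number, title=None):
    kwargs = dict(user_name=user_name, action=action, type='Issue',
                  id=' #{}'.format(number), url=url, title=title)
    template = MESSAGE_WITH_TITLE if title is not None else MESSAGE
    return template.format(**kwargs)

print(issue_event_message('Tomasz Kolek', 'created',
                          'https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1',
                          1, title='Issue title'))
# Tomasz Kolek created [Issue #1 Issue title](https://gitlab.com/tomaszkolek0/my-awesome-project/issues/1)
```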
2018-07-13T19:32:18
zulip/zulip
10,098
zulip__zulip-10098
[ "10095" ]
a8e5551395f725e1a1ce2b9cd55d42640b8623c6
diff --git a/zerver/lib/zcommand.py b/zerver/lib/zcommand.py --- a/zerver/lib/zcommand.py +++ b/zerver/lib/zcommand.py @@ -14,18 +14,21 @@ def process_zcommands(content: str, user_profile: UserProfile) -> Dict[str, Any] ret = dict() # type: Dict[str, Any] return ret - if command == 'night': + night_commands = ['night', 'dark'] + day_commands = ['day', 'light'] + + if command in night_commands: if user_profile.night_mode: msg = 'You are still in night mode.' else: - msg = 'Changed to night mode! To revert night mode, type `/day`.' + msg = 'Changed to night mode! To revert night mode, type `%s`.' % (content,) do_set_user_display_setting(user_profile, 'night_mode', True) ret = dict(msg=msg) return ret - if command == 'day': + if command in day_commands: if user_profile.night_mode: - msg = 'Changed to day mode! To revert day mode, type `/night`.' + msg = 'Changed to day mode! To revert day mode, type `%s`.' % (content,) do_set_user_display_setting(user_profile, 'night_mode', False) else: msg = 'You are still in day mode.'
slash commands: Add /dark and /light commands. We have /night and /day, and people are starting to use them. We should add the aliases /dark and /light.
2018-07-28T21:43:38
zulip/zulip
10,180
zulip__zulip-10180
[ "10178" ]
a7d7f6cada4cb64cbe7897cc1b70b94910c0e3f4
diff --git a/zerver/signals.py b/zerver/signals.py --- a/zerver/signals.py +++ b/zerver/signals.py @@ -77,7 +77,8 @@ def email_on_new_login(sender: Any, user: UserProfile, request: Any, **kwargs: A if user_tz == '': user_tz = timezone_get_current_timezone_name() local_time = timezone_now().astimezone(get_timezone(user_tz)) - context['login_time'] = local_time.strftime('%A, %B %d, %Y at %I:%M%p ') + user_tz + utc_offset = local_time.strftime('%z') + context['login_time'] = local_time.strftime('%A, %B %d, %Y at %I:%M%p ') + utc_offset context['device_ip'] = request.META.get('REMOTE_ADDR') or _("Unknown IP address") context['device_os'] = get_device_os(user_agent) context['device_browser'] = get_device_browser(user_agent)
diff --git a/zerver/tests/test_new_users.py b/zerver/tests/test_new_users.py --- a/zerver/tests/test_new_users.py +++ b/zerver/tests/test_new_users.py @@ -33,7 +33,8 @@ def test_send_login_emails_if_send_login_email_setting_is_true(self) -> None: utc = get_timezone('utc') user_tz = get_timezone(user.timezone) mock_time = datetime.datetime(year=2018, month=1, day=1, tzinfo=utc) - reference_time = mock_time.astimezone(user_tz).strftime('%A, %B %d, %Y at %I:%M%p ') + user.timezone + utc_offset = mock_time.astimezone(user_tz).strftime('%z') + reference_time = mock_time.astimezone(user_tz).strftime('%A, %B %d, %Y at %I:%M%p ') + utc_offset with mock.patch('zerver.signals.timezone_now', return_value=mock_time): self.client_post("/accounts/login/", info={"username": user.email, "password": password}, HTTP_USER_AGENT=firefox_windows)
Fix timezone display format in "new login" emails I just got this email when testing: ``` Login details: Server: http://asdasf.zulipdev.com:9991 Account: [email protected] Time: Friday, August 03, 2018 at 03:35PM America/Los_Angeles Device: Chrome on Linux. IP Address: 127.0.0.1 ``` We should display the timezone as e.g. `-05:00`, not with a location. Tagging as a priority, because we've seen people be confused into thinking this suggested they were in LA.
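A sketch of the change in formatting terms: append the numeric UTC offset from `strftime('%z')` instead of the IANA zone name. It uses `pytz` purely for the sketch's zone lookup, and the function name is illustrative.

```python
from datetime import datetime
import pytz

def login_time_string(now_utc: datetime, user_tz: str) -> str:
    local_time = now_utc.astimezone(pytz.timezone(user_tz))
    return local_time.strftime('%A, %B %d, %Y at %I:%M%p ') + local_time.strftime('%z')

print(login_time_string(datetime(2018, 8, 3, 22, 35, tzinfo=pytz.utc),
                        'America/Los_Angeles'))
# Friday, August 03, 2018 at 03:35PM -0700
```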
2018-08-03T23:44:53
zulip/zulip
10,232
zulip__zulip-10232
[ "9817" ]
271c7fbe65df59436f4408deaf1d41d19e3c111c
diff --git a/zerver/models.py b/zerver/models.py --- a/zerver/models.py +++ b/zerver/models.py @@ -548,7 +548,7 @@ def filter_pattern_validator(value: str) -> None: raise ValidationError(error_msg) def filter_format_validator(value: str) -> None: - regex = re.compile(r'^[\.\/:a-zA-Z0-9_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$') + regex = re.compile(r'^[\.\/:a-zA-Z0-9#_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$') if not regex.match(value): raise ValidationError('URL format string must be in the following format: '
diff --git a/zerver/tests/test_realm_filters.py b/zerver/tests/test_realm_filters.py --- a/zerver/tests/test_realm_filters.py +++ b/zerver/tests/test_realm_filters.py @@ -44,7 +44,7 @@ def test_create(self) -> None: result = self.client_post("/json/realm/filters", info=data) self.assert_json_error(result, 'URL format string must be in the following format: `https://example.com/%(\\w+)s`') - data['url_format_string'] = 'https://realm.com/my_realm_filter/%(id)s' + data['url_format_string'] = 'https://realm.com/my_realm_filter/#hashtag/%(id)s' result = self.client_post("/json/realm/filters", info=data) self.assert_json_success(result)
Add support for `hash` based URLS in Filter patterns [Related source code](https://github.com/zulip/zulip/blob/0212113569748ecac083db85aa40b974d2d53d2a/zerver/models.py#L512) Snippet: ```python def filter_format_validator(value: str) -> None: regex = re.compile(r'^[\.\/:a-zA-Z0-9_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$') # THE REGEX <<< if not regex.match(value): raise ValidationError('URL format string must be in the following format: ' '`https://example.com/%(\w+)s`') ``` Problem --------- I'm trying to add a filter to a site that uses a hash based router, which basically makes the url look like: `http://some-site.domain.com/#/c/$(id)s` And looking at `THE REGEX`, it does not allow a hash (`#`) in the url. Solution --------- I'm currently working directly on the container since I can't build the image offline, So my solution was to change the source code inside the container, so basically the new regex is something like: ```python r'^[\.\/:a-zA-Z0-9#_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$' #^^^ ``` And the url is then accepted. I'm willing to submit a PR if needed.
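A quick, self-contained check of the proposed regex change (adding `#` to the character class) against both a hash-based router URL, written with the `%(id)s` placeholder form the validator expects, and an existing-style filter URL:

```python
import re

NEW_REGEX = re.compile(r'^[\.\/:a-zA-Z0-9#_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$')

print(bool(NEW_REGEX.match('http://some-site.domain.com/#/c/%(id)s')))    # True
print(bool(NEW_REGEX.match('https://realm.com/my_realm_filter/%(id)s')))  # True
```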
Adding a hash is fine; feel free to submit a PR with a test in `test_realm_filters.py`. We generally want to make the rules for creating filters less unnecessarily limited, but we can fix this now and regardless it will be a helpful test case. Hello @zulip/server-settings members, this issue was labeled with the "area: settings (admin/org)" label, so you may want to check it out! <!-- areaLabelNotification --> @timabbot, I had a hard time working out the dev env. I can run manual tests, due to running on docker, but since I'm in windows, I'm having a hard time running the unit tests. Any advice on how to set up a proper dev env? @gioragutt one option is to just get a Linux remote dev VM; if you ask on "#development help", @hackerkid can create one for you in Zulip's Digital Ocean account (or you can create your own). @timabbott, I can alternatively use the docker compose way. I'm using docker for windows, so I can either use docker OR a vm, not both. Can I run tests inside it? I mean, it sounds possible. I'll test it out and tell you. Thanks for the replies, I appreciate it. I'm constantly looking for feedback from our people, I'll surely add issues for the existing points, and future ones as well. The Docker production environment doesn't support running unit tests (we don't ship the files required in production images). For this specific issue, @aero31aero can you do a quick PR? You're working on that subsystem anyway and it seems like it'd be really quick.
2018-08-08T14:39:46
zulip/zulip
10,353
zulip__zulip-10353
[ "10297" ]
eb676e8e508df0349662addd6245ebf7ab235744
diff --git a/zproject/backends.py b/zproject/backends.py --- a/zproject/backends.py +++ b/zproject/backends.py @@ -107,20 +107,11 @@ def common_get_active_user(email: str, realm: Realm, return None return user_profile -def generate_dev_ldap_dir(mode: str, extra_users: int=0) -> Dict[str, Dict[str, Sequence[str]]]: +def generate_dev_ldap_dir(mode: str, num_users: int=8) -> Dict[str, Dict[str, Sequence[str]]]: mode = mode.lower() - names = [ - ("Zoe", "[email protected]"), - ("Othello, the Moor of Venice", "[email protected]"), - ("Iago", "[email protected]"), - ("Prospero from The Tempest", "[email protected]"), - ("Cordelia Lear", "[email protected]"), - ("King Hamlet", "[email protected]"), - ("aaron", "[email protected]"), - ("Polonius", "[email protected]"), - ] - for i in range(extra_users): - names.append(('Extra User %d' % (i,), 'ldap_extrauser%[email protected]' % (i,))) + names = [] + for i in range(1, num_users+1): + names.append(('LDAP User %d' % (i,), 'ldapuser%[email protected]' % (i,))) ldap_dir = {} if mode == 'a': @@ -336,7 +327,7 @@ def __init__(self) -> None: self.mock_initialize.return_value = self.mock_ldap self.mock_ldap.directory = generate_dev_ldap_dir(settings.FAKE_LDAP_MODE, - settings.FAKE_LDAP_EXTRA_USERS) + settings.FAKE_LDAP_NUM_USERS) def authenticate(self, username: str, password: str, realm: Optional[Realm]=None, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: diff --git a/zproject/dev_settings.py b/zproject/dev_settings.py --- a/zproject/dev_settings.py +++ b/zproject/dev_settings.py @@ -103,7 +103,7 @@ # In any case, the LDAP user account data is available in: # zerver/tests/fixtures/ldap_dir.json FAKE_LDAP_MODE = None # type: Optional[str] -FAKE_LDAP_EXTRA_USERS = 0 +# FAKE_LDAP_NUM_USERS = 8 if FAKE_LDAP_MODE: LDAP_APPEND_DOMAIN = None diff --git a/zproject/settings.py b/zproject/settings.py --- a/zproject/settings.py +++ b/zproject/settings.py @@ -144,7 +144,7 @@ def get_secret(key: str, default_value: Optional[Any]=None, # support local development of LDAP auth without an LDAP server. # Detailed docs in zproject/dev_settings.py. 'FAKE_LDAP_MODE': None, - 'FAKE_LDAP_EXTRA_USERS': 0, + 'FAKE_LDAP_NUM_USERS': 8, # Social auth; we support providing values for some of these # settings in zulip-secrets.conf instead of settings.py in development.
diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -2110,21 +2110,21 @@ def setup_subdomain(self, user_profile: UserProfile) -> None: realm.save() def test_generate_dev_ldap_dir(self) -> None: - ldap_dir = generate_dev_ldap_dir('A', 2) + ldap_dir = generate_dev_ldap_dir('A', 10) self.assertEqual(len(ldap_dir), 10) regex = re.compile(r'(uid\=)+[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+(\,ou\=users\,dc\=zulip\,dc\=com)') for key, value in ldap_dir.items(): self.assertTrue(regex.match(key)) self.assertCountEqual(list(value.keys()), ['cn', 'userPassword']) - ldap_dir = generate_dev_ldap_dir('b', 1) + ldap_dir = generate_dev_ldap_dir('b', 9) self.assertEqual(len(ldap_dir), 9) regex = re.compile(r'(uid\=)+[a-zA-Z0-9_.+-]+(\,ou\=users\,dc\=zulip\,dc\=com)') for key, value in ldap_dir.items(): self.assertTrue(regex.match(key)) self.assertCountEqual(list(value.keys()), ['cn', 'userPassword']) - ldap_dir = generate_dev_ldap_dir('c', 0) + ldap_dir = generate_dev_ldap_dir('c', 8) self.assertEqual(len(ldap_dir), 8) regex = re.compile(r'(uid\=)+[a-zA-Z0-9_.+-]+(\,ou\=users\,dc\=zulip\,dc\=com)') for key, value in ldap_dir.items():
Complete follow-ups to convenient LDAP auth testing system Continuing from #10181, I think there's some high-value follow-ups to this that we should do: * Play a bit with the defaults for username/password; I think it's a mistake to have LDAP users like `[email protected]` with a name "Iago". Would be much better to have totally new names for these users. * Once that's done, document this somewhere; I think my best concept for that is to expand https://zulip.readthedocs.io/en/latest/subsystems/oauth.html to talk about LDAP auth as well (and rename it to `auth.html`).
Hello @zulip/server-authentication members, this issue was labeled with the "area: authentication" label, so you may want to check it out! <!-- areaLabelAddition -->
2018-08-18T03:33:23
zulip/zulip
10,641
zulip__zulip-10641
[ "10639" ]
d8c19cb003b6a042a265ded13f70bf6245ab21fd
diff --git a/zerver/data_import/hipchat.py b/zerver/data_import/hipchat.py --- a/zerver/data_import/hipchat.py +++ b/zerver/data_import/hipchat.py @@ -79,20 +79,13 @@ def convert_user_data(raw_data: List[ZerverFieldsT], realm_id: int) -> List[Zerv for d in raw_data ] - def _is_realm_admin(v: str) -> bool: - if v == 'user': - return False - elif v == 'admin': - return True - else: - raise Exception('unexpected value') - def process(in_dict: ZerverFieldsT) -> ZerverFieldsT: delivery_email = in_dict['email'] email = in_dict['email'] full_name = in_dict['name'] id = in_dict['id'] - is_realm_admin = _is_realm_admin(in_dict['account_type']) + is_realm_admin = in_dict['account_type'] == 'admin' + is_guest = in_dict['account_type'] == 'guest' short_name = in_dict['mention_name'] timezone = in_dict['timezone'] @@ -113,6 +106,7 @@ def process(in_dict: ZerverFieldsT) -> ZerverFieldsT: id=id, is_active=is_active, is_realm_admin=is_realm_admin, + is_guest=is_guest, realm_id=realm_id, short_name=short_name, timezone=timezone, diff --git a/zerver/data_import/import_util.py b/zerver/data_import/import_util.py --- a/zerver/data_import/import_util.py +++ b/zerver/data_import/import_util.py @@ -34,6 +34,7 @@ def build_user(avatar_source: str, id: int, is_active: bool, is_realm_admin: bool, + is_guest: bool, realm_id: int, short_name: str, timezone: str) -> ZerverFieldsT: @@ -47,6 +48,7 @@ def build_user(avatar_source: str, id=id, is_active=is_active, is_realm_admin=is_realm_admin, + is_guest=is_guest, pointer=pointer, realm_id=realm_id, short_name=short_name, diff --git a/zproject/settings.py b/zproject/settings.py --- a/zproject/settings.py +++ b/zproject/settings.py @@ -1313,8 +1313,7 @@ def get_dirs(self): if POPULATE_PROFILE_VIA_LDAP: import ldap - if (AUTH_LDAP_BIND_DN - and ldap.OPT_REFERRALS not in AUTH_LDAP_CONNECTION_OPTIONS): + if (AUTH_LDAP_BIND_DN and ldap.OPT_REFERRALS not in AUTH_LDAP_CONNECTION_OPTIONS): # The default behavior of python-ldap (without setting option # `ldap.OPT_REFERRALS`) is to follow referrals, but anonymously. # If our original query was non-anonymous, that's unlikely to
hipchat conversion: Handle account_type of "guest". Our original sample data didn't include this, but it's pretty easy to map. Make sure `is_realm_admin` gets set to `False` and `is_guest` gets set to `True` when `account_type` is "guest" inside of `convert_user_data` in `zerver/data_import/hipchat.py`.
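The mapping itself is tiny. A sketch, with the flag names following the converter and everything else illustrative:

```python
def account_flags(account_type: str) -> dict:
    return {
        'is_realm_admin': account_type == 'admin',
        'is_guest': account_type == 'guest',
    }

print(account_flags('guest'))  # {'is_realm_admin': False, 'is_guest': True}
print(account_flags('user'))   # {'is_realm_admin': False, 'is_guest': False}
```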
Sounds good :)
2018-10-11T22:05:38
zulip/zulip
10,843
zulip__zulip-10843
[ "10515" ]
86801d350abcb54a1fcae76473526bb918ca1d29
diff --git a/zerver/views/auth.py b/zerver/views/auth.py --- a/zerver/views/auth.py +++ b/zerver/views/auth.py @@ -348,6 +348,7 @@ def send_oauth_request_to_google(request: HttpRequest) -> HttpResponse: 'redirect_uri': reverse_on_root('zerver.views.auth.finish_google_oauth2'), 'scope': 'profile email', 'state': csrf_state, + 'prompt': 'select_account', } return redirect(google_uri + urllib.parse.urlencode(params))
registration: "Log in with google" with no account takes you to the wrong page for invite-only realms. To reproduce: * Create an invite-only realm, foo.zulipdev.com * Log in to a google account not corresponding to any user in foo.zulipdev.com. * Go to foo.zulipdev.com and click "Log in with google". * It takes you to a page that says "You need an invitation to join this organization". It should instead take you to a page that allows you to log in to the correct google account. As a V1, it can just give an error like "the google account xxx@domain doesn't have an account in this organization. Please [log in] to the appropriate google account first." In general I suspect there's something wrong in our logic for invite-only realms here; at no point should you ever get to "You need an invitation to join this organization" from the login page.
Hello @zulip/server-authentication members, this issue was labeled with the "area: authentication" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim > In general I suspect there's something wrong in our logic for invite-only realms here; at no point should you ever get to "You need an invitation to join this organization" from the login page. @rishig I disagree; if you actually don't have an account in the organization, this is exactly the error message you need to receive so that you know how to proceed. Also, I don't think we need a "new page" as part of implementing this; I think we just want to change the error message text on the login page for this case; by leaving it on the login page, the user can then just click "log in with google" or "log in with github" or whatever at that time. @timabbott But why the user is forced to log in again if he doesn't have an invitation. He should be logged in and shall ask for something like 'Request to get invitation' if he wishes. Please let me know if that makes sense to you? @zulipbot abandon Well, there are two cases on the user end: * The user has an account, but it's associated with a different one of the user's Google account (i.e. they clicked the wrong one or actually only have an account associated with their GitHub account). In that case, they should be put somewhere where they can pick how they want to try to login next. * The user doesn't have an account, in which case we should let them know they need an invitation from an organization administrator. It's not clear if we want to offer a "request an invitation" feature; there are spam/abuse issues to think about there. Ignoring the general point of there being a smell in this flow, what's needed in this specific case is some way to log in to your correct google account. Putting an error on the /login page isn't going to help. I'm not suggesting a new page. We already have the correct behavior if you're logged in to 0 google accounts, and I'm suggesting we should just copy that behavior. To address the second bullet, I think we can add a generic statement below the /login box: "Don't have an account yet? Contact the organization administrator for an invitation." (no "request an invitation" feature). I can add an issue to add that text, if that seems like a fine solution. I think we might have something like that already at the bottom of the left side for invite-only realms? @rishig can you make a hacky screenshot/drawing of what you want things to look like? What I'd expect is to just get put back on the login screen and make a different choice (if you click "login with google", it lets you pick a different one), or you can do "login with github" if it turns out you don't have a google account. > we might have something like that already at the bottom of the left side for invite-only realms? we currently don't > can you make a hacky screenshot/drawing of what you want things to look like? If you click "Log in with Google" and aren't signed in to an appropriate google account, I'd want a screen like: ![image](https://user-images.githubusercontent.com/890911/45904488-0de0fb80-bda2-11e8-9acd-b1fa7e0f9e99.png) I don't think that's possible with Google oauth. Google's auth API doesn't give us the set of users the user is logged into; instead, we redirect to Google's website, the UI to pick a Google account is on that site, and Google redirects back to us data on just the Google account the user selected. At no point do we receive the list of options. I see. 
ok, in that case I'm fine with the "error message on /login" solution. Something like: [email protected] doesn't have a account in this Zulip organization. Go to google.com/accounts (or whatever it is) to log into the Google account associated with your Zulip account. Opened "put generic text at the bottom of the login box" as #10542. ok, I think Zapier is implementing the flow that I want: ![image](https://user-images.githubusercontent.com/890911/45977148-8ee7ff00-bffd-11e8-81a2-af5ea591636d.png) If the user is signed in to 0 or >= 2 Google accounts, we already do the correct thing. If the user is signed in to 1 Google account, I think the correct flow is: * Google will send us the email of the one account they are signed in to. * If the email of the google account corresponds to a zulip account in that realm, great! Log them in. * If the email of the google account does not correspond to a zulip account in the realm, show them the dialog box above. That page is part of Google's site, so not directly under Zapier's control. If it is, either we already get that behavior (and you're just doing the testing a bit differently between the two, with e.g. Google's cookies remembering your previous selections making the difference; I suspect it's actually this going on), or there must be a setting we can provide that changes the behavior for single-google-account. In that case, the next step is to figure out where in Google's maze of documentation they discuss this (I just spent 10 minutes looking around https://developers.google.com/identity/protocols/OAuth2, without luck). In other words, I don't know how to do the "show them the dialog box above" action in your suggestion with Google's OAuth system. yup, agreed it is likely a one line change in how we talk to Google. To make the problem more concrete: on the desktop app, I think this means you can't log in at all (via oauth) to two realms using two different google accounts. From some more googling: * https://stackoverflow.com/questions/37711665/forcing-a-user-to-choose-an-account-via-google-oauth2 * https://github.com/discourse/discourse/commit/24d0a7a4c71ba5147a5741c7a19dd061296098a0 So maybe we need to add something like `&prompt=select_account+consent` to the URL we send to Google. I'm not actually sure what those settings do. If you want to do some spelunking, see (1) what URL Zapier generates and if it has any of those settings and then (2) try that out in a Zulip development environment (we have docs for setting up Google auth there); patching `params` in `send_oauth_request_to_google`. Aha! https://developers.google.com/identity/protocols/OAuth2WebServer#creatingclient has the official documentation for the parameters they support. Adding `priority: high` now that a user has actually run into the desktop app problem: https://chat.zulip.org/#narrow/stream/9-issues/topic/Desktop.20App.20multiple.20Invite.20Only @zulipbot claim
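For reference, the fix this thread converges on is a single extra parameter on the authorization URL Zulip redirects the browser to. A rough sketch of how that URL is assembled, with placeholder values for the client id, redirect URI and CSRF state (the endpoint shown is Google's standard OAuth2 authorization URL):
```
import urllib.parse

google_uri = "https://accounts.google.com/o/oauth2/auth?"
params = {
    "response_type": "code",
    "client_id": "<google-oauth2-client-id>",      # placeholder
    "redirect_uri": "https://zulip.example.com/accounts/login/google/done/",  # placeholder
    "scope": "profile email",
    "state": "<csrf-state-token>",                 # placeholder
    # The new bit: always show Google's account chooser, even when the
    # browser is signed in to exactly one Google account.
    "prompt": "select_account",
}
print(google_uri + urllib.parse.urlencode(params))
```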
2018-11-15T15:46:54
zulip/zulip
11,027
zulip__zulip-11027
[ "11014" ]
982a70431f18a2b75d6d287748ff5ce2b243a60f
diff --git a/zerver/management/commands/initialize_voyager_db.py b/zerver/management/commands/initialize_voyager_db.py --- a/zerver/management/commands/initialize_voyager_db.py +++ b/zerver/management/commands/initialize_voyager_db.py @@ -3,7 +3,6 @@ from typing import Any, Iterable, Tuple, Optional from django.conf import settings -from django.contrib.sites.models import Site from django.core.management.base import BaseCommand from zerver.lib.bulk_create import bulk_create_users @@ -58,7 +57,3 @@ def handle(self, *args: Any, **options: Any) -> None: self.stdout.write("Successfully populated database with initial data.\n") self.stdout.write("Please run ./manage.py generate_realm_creation_link " "to generate link for creating organization") - - site = Site.objects.get_current() - site.domain = settings.EXTERNAL_HOST - site.save() diff --git a/zproject/settings.py b/zproject/settings.py --- a/zproject/settings.py +++ b/zproject/settings.py @@ -492,13 +492,6 @@ def get_config(section: str, key: str, default_value: Optional[Any]=None) -> Opt # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' -# The ID, as an integer, of the current site in the django_site database table. -# This is used so that application data can hook into specific site(s) and a -# single database can manage content for multiple sites. -# -# We set this site's string_id to 'zulip' in populate_db. -SITE_ID = 1 - # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True @@ -566,7 +559,6 @@ def get_dirs(self): 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', - 'django.contrib.sites', 'django.contrib.staticfiles', 'confirmation', 'pipeline',
diff --git a/zerver/tests/test_signup.py b/zerver/tests/test_signup.py --- a/zerver/tests/test_signup.py +++ b/zerver/tests/test_signup.py @@ -6,7 +6,6 @@ import django_otp from django.conf import settings from django.contrib.contenttypes.models import ContentType -from django.contrib.sites.models import Site from django.http import HttpResponse, HttpRequest from django.test import TestCase, override_settings from django.utils.timezone import now as timezone_now @@ -459,7 +458,6 @@ def test_register(self) -> None: # Clear all the caches. flush_per_request_caches() ContentType.objects.clear_cache() - Site.objects.clear_cache() with queries_captured() as queries: self.register(self.nonreg_email('test'), "test")
two factor authentication domain name Is it possible to configure the "domain" part or label of the generated QR-Code somewhere? Right now it seems to always be "example.net" when I scan the QR-Code.
The native 2FA integration in Zulip is unfinished (that's why it's turned off by default). #10921 has some more details. This is in Google authenticator? It's a good question where that would come from; we don't have the string "example.net" anywhere in our codebase or in that of django-two-factor, the library we use for this. Yeah, I think so, too. Must be somewhere near https://github.com/Bouke/django-two-factor-auth/blob/d000074089ff02ee616a0c5f2e6b03d175f3b9ff/two_factor/views/core.py#L557 I guess, but I am not very good with python and django tbh :-) We tried to set OTP_HOTP_ISSUER in the settings.py, but that did not help. I tried a bit around with the "Sites" Framework in the shell and I found: ``` In [7]: from django.contrib.sites.models import Site In [8]: Site.objects.get_current().domain Out[8]: 'collab.oss.ewetel.de' In [9]: Site.objects.get_current().name Out[9]: 'example.com' ``` Could be that its coming from there ? I just have no idea how to actually change it. And is there a way to change the domain, too ? That url is not the right one anymore anyways. Ok, found out how to change + save it with the shell. Now the QR-Code is OK :-) I have no idea where/if the sites Framework is used in zulip though. Would be cool if it would be possible to set this in the settings maybe Ugh, it uses the Django "Sites" framework. We don't use that for anything in Zulip (other than, apparently indirectly, this). I think our near-term fix might be to monkey-patch `from django.contrib.sites.shortcuts import get_current_site` to return effectively the `realm.host` value, since that's where to get the correct value for the current realm/subdomain. Thanks for investigating! Changing that `issuer` line to ` issuer=request.user.realm.host` seems to do the right thing. So we just need to figure out a good way to make that change without forking social-auth. Ahh, there's a better solution: If we just remove the Site Django middleware (which we're not using), it'll automatically do the right thing. So I'll do that.
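For anyone hitting this before the fix lands, the shell workaround described above ("found out how to change + save it with the shell") isn't spelled out, but it presumably amounts to updating the `Site` row from `./manage.py shell`; the hostname below is an example, substitute your server's own:
```
# Pre-fix workaround, run inside `./manage.py shell` on the server.
from django.contrib.sites.models import Site

site = Site.objects.get_current()
site.domain = "chat.example.com"  # your Zulip server's hostname
site.name = "chat.example.com"    # shows up as the issuer/label in the QR code
site.save()
```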
2018-12-12T22:17:12
zulip/zulip
11,054
zulip__zulip-11054
[ "10534" ]
32f24bff8d199d582052671d8d2b09ff950e4a8d
diff --git a/scripts/lib/zulip_tools.py b/scripts/lib/zulip_tools.py --- a/scripts/lib/zulip_tools.py +++ b/scripts/lib/zulip_tools.py @@ -15,9 +15,10 @@ import time import json import uuid +import configparser if False: - from typing import Sequence, Set, Any, Dict, List + from typing import Sequence, Set, Any, Dict, List, Optional DEPLOYMENTS_DIR = "/home/zulip/deployments" LOCK_DIR = os.path.join(DEPLOYMENTS_DIR, "lock") @@ -410,3 +411,18 @@ def assert_running_as_root(strip_lib_from_paths: bool=False) -> None: if not is_root(): print("{} must be run as root.".format(script_name)) sys.exit(1) + +def get_config(config_file, section, key, default_value=""): + # type: (configparser.RawConfigParser, str, str, str) -> str + if config_file.has_option(section, key): + return config_file.get(section, key) + return default_value + +def get_config_file() -> configparser.RawConfigParser: + config_file = configparser.RawConfigParser() + config_file.read("/etc/zulip/zulip.conf") + return config_file + +def get_deploy_options(config_file): + # type: (configparser.RawConfigParser) -> List[str] + return get_config(config_file, 'deployment', 'deploy_options', "").strip().split()
upgrade-zulip should respect the same options as upgrade-zulip-from-git Specifically around things like skipping migrations and puppet. This should be easy to do if we refactor the argument parsing logic into a function in `zulip_tools.py` and then import it from both places, for example.
Hello @zulip/server-production members, this issue was labeled with the "area: production installer" label, so you may want to check it out! @zulipbot claim Hello @shubham-padia, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon!
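For context, the helpers added in the patch above are what both upgrade scripts would share. A short usage sketch (the flag examples in the comments are illustrative; the real scripts also add the deployment root to `sys.path` before this import works):
```
# Sketch of how upgrade-zulip and upgrade-zulip-from-git can share option handling.
from scripts.lib.zulip_tools import get_config_file, get_deploy_options

config_file = get_config_file()                    # parses /etc/zulip/zulip.conf
deploy_options = get_deploy_options(config_file)   # [deployment] deploy_options, split into a list
# These (e.g. flags that skip puppet or migrations) can then be appended to the
# arguments each upgrade script forwards to the shared upgrade machinery.
print(deploy_options)
```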
2018-12-15T21:00:19
zulip/zulip
11,317
zulip__zulip-11317
[ "10673" ]
4480846aa55dea00eef8d4916031ab645a6f397c
diff --git a/zerver/lib/bugdown/api_arguments_table_generator.py b/zerver/lib/bugdown/api_arguments_table_generator.py --- a/zerver/lib/bugdown/api_arguments_table_generator.py +++ b/zerver/lib/bugdown/api_arguments_table_generator.py @@ -105,7 +105,7 @@ def render_table(self, arguments: List[Dict[str, Any]]) -> List[str]: tr = """ <tr> <td><code>{argument}</code></td> - <td><code>{example}</code></td> + <td class="json-api-example"><code>{example}</code></td> <td>{required}</td> <td>{description}</td> </tr>
Improve formatting for "arguments" sections with long examples. The line-wrapping for this endpoint's API documentation looks really ugly: ![image](https://user-images.githubusercontent.com/2746074/47042583-7e303200-d140-11e8-9b4f-d6fc1325dcba.png) We should either remove the maximum width on "description", or figure out a way to use more than one line for a given endpoint that doesn't look so bad (e.g. having the example be on the next line after the rest of the endpoint description).
Hello @zulip/server-api members, this issue was labeled with the "area: documentation (api and integrations)" label, so you may want to check it out! I think formatting the example JSON data with a more vertical style would help make the example more readable and reduce the width of the "Example" column to give more room for "Description". Here's a link to the live docs: https://chat.zulip.org/api/update-subscription-properties @zulipbot claim
2019-01-19T14:03:52
zulip/zulip
11,359
zulip__zulip-11359
[ "10909" ]
893e1475bb6a64fc16aa87c2789b6d7e5c2a1720
diff --git a/zerver/webhooks/stripe/view.py b/zerver/webhooks/stripe/view.py --- a/zerver/webhooks/stripe/view.py +++ b/zerver/webhooks/stripe/view.py @@ -14,6 +14,19 @@ UnexpectedWebhookEventType from zerver.models import UserProfile +INVOICE_INFO = { + "created": "has been created", + "deleted": "has been deleted", + "finalized": "has been finalized", + "marked_uncollectible": "has been marked as uncollectible", + "payment_failed": "has failed", + "payment_succeeded": "has succeeded", + "sent": "has been sent", + "upcoming": "has been created", + "updated": "has been updated", + "voided": "has been voided" +} + class SuppressedEvent(Exception): pass @@ -148,18 +161,6 @@ def default_body(update_blacklist: List[str]=[]) -> str: topic = 'files' body = default_body() + ' ({purpose}). \nTitle: {title}'.format( purpose=object_['purpose'].replace('_', ' '), title=object_['title']) - if category == 'invoice': - if event == 'upcoming': # nocoverage - body = 'Upcoming invoice created' - else: - body = default_body(update_blacklist=['lines', 'description', 'number', 'finalized_at']) - if event == 'created': # nocoverage - # Could potentially add link to invoice PDF here - body += ' ({reason})\nBilling method: {method}\nTotal: {total}\nAmount due: {due}'.format( - reason=object_['billing_reason'].replace('_', ' '), - method=object_['billing'].replace('_', ' '), - total=amount_string(object_['total'], object_['currency']), - due=amount_string(object_['amount_due'], object_['currency'])) if category == 'invoiceitem': # nocoverage body = default_body(update_blacklist=['description']) if event == 'created': @@ -198,6 +199,28 @@ def default_body(update_blacklist: List[str]=[]) -> str: amount=amount, end=end ) + + if category.startswith('invoice'): + object_id = object_['id'] + link = "https://dashboard.stripe.com/invoices/{}".format(object_id) + amount = amount_string(object_["amount_due"], object_["currency"]) + body_t = "An {invoice_type} **[invoice]({link})** for the payment of amount **{amount}** {end}." + + end = INVOICE_INFO.get(event) # type: ignore # expression has type Optional[str] + + if event == "upcoming": + invoice_type = "upcoming" + else: + invoice_type = "" + + topic = "invoice {}".format(object_id) + body = body_t.format( + invoice_type = invoice_type, + link=link, + amount=amount, + end=end + ) + if category in ['payment_intent', 'plan', 'product', 'recipient', 'reporting', 'review', 'sigma', 'sku', 'source', 'subscription_schedule', 'topup', 'transfer']: # nocoverage
diff --git a/zerver/webhooks/stripe/tests.py b/zerver/webhooks/stripe/tests.py --- a/zerver/webhooks/stripe/tests.py +++ b/zerver/webhooks/stripe/tests.py @@ -112,12 +112,6 @@ def test_customer_discount_created(self) -> None: self.send_and_test_stream_message('customer_discount_created', expected_topic, expected_message, content_type="application/x-www-form-urlencoded") - def test_invoice_payment_failed(self) -> None: - expected_topic = u"cus_00000000000000" - expected_message = u"[Invoice](https://dashboard.stripe.com/invoices/in_00000000000000) payment failed" - self.send_and_test_stream_message('invoice_payment_failed', expected_topic, expected_message, - content_type="application/x-www-form-urlencoded") - def test_payout_canceled(self) -> None: expected_topic = u"Payout po_00000000000000" expected_message = u"**[Payout](https://dashboard.stripe.com/payout/po_00000000000000)** for amount **$11.00** has been canceled." @@ -152,3 +146,62 @@ def test_payout_updated(self) -> None: self.send_and_test_stream_message('payout_updated', expected_topic, expected_message, content_type="application/x-www-form-urlencoded") + + def test_invoice_created(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been created." + self.send_and_test_stream_message('invoice_created', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_deleted(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been deleted." + self.send_and_test_stream_message('invoice_deleted', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_finalized(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been finalized." + self.send_and_test_stream_message('invoice_finalized', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_marked_uncollectible(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been marked as uncollectible." + self.send_and_test_stream_message('invoice_marked_uncollectible', expected_topic, expected_message, content_type="application/x-www-form-urlencoded") + + def test_invoice_payment_failed(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has failed." + self.send_and_test_stream_message('invoice_payment_failed', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_payment_succeeded(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has succeeded." 
+ self.send_and_test_stream_message('invoice_payment_succeeded', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_sent(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been sent." + self.send_and_test_stream_message('invoice_sent', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_upcoming(self) -> None: + expected_topic = u"invoice None" + expected_message = u"An upcoming **[invoice](https://dashboard.stripe.com/invoices/None)** for the payment of amount **$10.00** has been created." + self.send_and_test_stream_message('invoice_upcoming', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_updated(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been updated." + self.send_and_test_stream_message('invoice_updated', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded") + + def test_invoice_voided(self) -> None: + expected_topic = u"invoice in_00000000000000" + expected_message = u"An **[invoice](https://dashboard.stripe.com/invoices/in_00000000000000)** for the payment of amount **$10.00** has been voided." + self.send_and_test_stream_message('invoice_voided', expected_topic, expected_message, + content_type="application/x-www-form-urlencoded")
webhooks/stripe: Handle invoice and payout events explicitly. Currently, we support `invoice` events in our Stripe webhooks, but only implicitly (i.e., we let a generic function `default_body()` create a message for most `invoice` events). We don't support `payout` events at all. We should probably consider supporting these events in one form or another. @timabbott, @rishig: Opening this so we don't forget. I'll add this to my queue for when I get around to restructuring the webhook in general. Thanks! :)
Hello @zulip/server-integrations members, this issue was labeled with the "area: integrations" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim Hello @sameerchoubey! Thanks for your interest in Zulip! You have attempted to claim an issue without the labels "help wanted", "good first issue". Since you're a new contributor, you can only claim and submit pull requests for issues with the [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+no%3Aassignee+label%3A%22help+wanted%22) or [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+no%3Aassignee+label%3A%22good+first+issue%22) labels. If this is your first time here, we recommend reading our [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html) before getting started. I'm working on this. @zulipbot claim Welcome to Zulip, @sameerchoubey! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * Sign the [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/), so that Zulip can use your code. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](https://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! @zulipbot claim
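The patch above replaces the generic `default_body()` handling with an explicit event-to-phrase table. Below is a simplified, self-contained sketch of how a topic and message come out of that table; the amount formatting here assumes USD cents, whereas the real handler uses `amount_string()` for proper currency handling:
```
from typing import Any, Dict, Tuple

INVOICE_INFO = {
    "created": "has been created",
    "payment_failed": "has failed",
    "payment_succeeded": "has succeeded",
}

def invoice_message(event: str, object_: Dict[str, Any]) -> Tuple[str, str]:
    object_id = object_["id"]
    link = "https://dashboard.stripe.com/invoices/{}".format(object_id)
    amount = "${:.2f}".format(object_["amount_due"] / 100)  # cents -> dollars; USD-only here
    invoice_type = "upcoming " if event == "upcoming" else ""
    topic = "invoice {}".format(object_id)
    body = "An {}**[invoice]({})** for the payment of amount **{}** {}.".format(
        invoice_type, link, amount, INVOICE_INFO.get(event, "has been updated"))
    return topic, body

print(invoice_message("payment_succeeded",
                      {"id": "in_00000000000000", "amount_due": 1000}))
```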
2019-01-26T10:47:33
zulip/zulip
11,602
zulip__zulip-11602
[ "11590" ]
9faa009f661d5f0a1df22f789f5ad7edf5dd2ac7
diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py --- a/zerver/lib/integrations.py +++ b/zerver/lib/integrations.py @@ -463,7 +463,8 @@ def __init__(self, name: str, *args: Any, **kwargs: Any) -> None: BOT_INTEGRATIONS = [ BotIntegration('github_detail', ['version-control', 'bots'], display_name='GitHub Detail'), - BotIntegration('xkcd', ['bots', 'misc'], display_name='xkcd'), + BotIntegration('xkcd', ['bots', 'misc'], display_name='xkcd', + logo='static/images/integrations/logos/xkcd.png'), ] # type: List[BotIntegration] HUBOT_INTEGRATIONS = [
integrations: Add missing logos. We seem to be missing logos for Instagram, Twitter, and YouTube. Also, the logo for Groove is too small (needs to be trimmed), and the logos for irc, matrix, and xkcd are too big (need to add some left and right padding probably to the container that holds the logo).
Hello @zulip/server-api members, this issue was labeled with the "area: documentation (api and integrations)" label, so you may want to check it out! <!-- areaLabelAddition --> Isn't `./static/images/integrations/logos/twitter.svg` a Twitter logo? And ./static/images/integrations/logos/instagram.png for instagram? ./static/images/integrations/logos/youtube.svg for youtube. Logos appear for me on zulipchat.com/integrations... oh weird. Here's what it is for me, on zulipchat.com/integrations: ![image](https://user-images.githubusercontent.com/890911/52904944-a364a800-31e7-11e9-8b1a-31d86bfcb12d.png) ![image](https://user-images.githubusercontent.com/890911/52904949-b1b2c400-31e7-11e9-928c-c372ce67e5fd.png) ![image](https://user-images.githubusercontent.com/890911/52904953-c000e000-31e7-11e9-9200-d2d5f083d725.png) Check your browser console "Network" tab and/or inspect element on those for what's going on here. I wouldn't be surprised if this turns out to be a browser extension that's trying to hide social buttons for Twitter/Instagram/Youtube (or something), given that those are exactly the list of integrations that have that kind of feature? ![image](https://user-images.githubusercontent.com/2746074/52905667-27239200-31f2-11e9-967f-fb8de5edea8f.png)
2019-02-17T19:49:52
zulip/zulip
11,992
zulip__zulip-11992
[ "11992" ]
23856fb0ab9db1d0d2658d5f83c08682b42888ab
diff --git a/zerver/webhooks/bitbucket3/view.py b/zerver/webhooks/bitbucket3/view.py --- a/zerver/webhooks/bitbucket3/view.py +++ b/zerver/webhooks/bitbucket3/view.py @@ -10,26 +10,54 @@ from zerver.lib.response import json_success from zerver.lib.webhooks.git import TOPIC_WITH_BRANCH_TEMPLATE, \ get_push_tag_event_message, get_remove_branch_event_message, \ - get_create_branch_event_message, get_commits_comment_action_message + get_create_branch_event_message, get_commits_comment_action_message, \ + TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE, get_pull_request_event_message, \ + CONTENT_MESSAGE_TEMPLATE from zerver.lib.webhooks.common import check_send_webhook_message, \ UnexpectedWebhookEventType from zerver.webhooks.bitbucket2.view import BITBUCKET_TOPIC_TEMPLATE, \ BITBUCKET_FORK_BODY, BITBUCKET_REPO_UPDATED_CHANGED -BRANCH_UPDATED_MESSAGE_TEMPLATE = "{user_name} pushed to branch {branch_name}. Head is now {head}" +BRANCH_UPDATED_MESSAGE_TEMPLATE = "{user_name} pushed to branch {branch_name}. Head is now {head}." +PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE = """{user_name} marked [PR #{number}]({url}) \ +as \"needs work\"""" +PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE_WITH_TITLE = """{user_name} marked \ +[PR #{number} {title}]({url}) as \"needs work\"""" +PULL_REQUEST_REASSIGNED_TEMPLATE = """{user_name} reassigned [PR #{number}]({url}) \ +to {assignees}""" +PULL_REQUEST_REASSIGNED_TEMPLATE_WITH_TITLE = """{user_name} reassigned [PR #{number} \ +{title}]({url}) to {assignees}""" +PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE = """{user_name} removed all reviewers from [PR \ +#{number}]({url})""" +PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE_WITH_TITLE = """{user_name} removed all reviewers \ +from [PR #{number} {title}]({url})""" +PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS = """{user_name} {action} [PR #{number}]\ +({url})\nfrom `{source}` to `{destination}` (assigned to {assignees} for review)""" +PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS_WITH_TITLE = """{user_name} {action} \ +[PR #{number} {title}]({url})\nfrom `{source}` to `{destination}` (assigned to {assignees} for \ +review)""" + +def get_user_name(payload: Dict[str, Any]) -> str: + user_name = "[{name}]({url})".format(name=payload["actor"]["name"], + url=payload["actor"]["links"]["self"][0]["href"]) + return user_name def repo_comment_handler(payload: Dict[str, Any], action: str) -> List[Dict[str, str]]: repo_name = payload["repository"]["name"] - user_name = payload["actor"]["name"] subject = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo_name) sha = payload["commit"] commit_url = payload["repository"]["links"]["self"][0]["href"][:-6] # remove the "browse" at the end commit_url += "commits/%s" % (sha,) - body = get_commits_comment_action_message(user_name=user_name, - action=action, - commit_url=commit_url, - sha=sha, - message=payload["comment"]["text"]) + message = payload["comment"]["text"] + if action == "deleted their comment": + message = "~~{message}~~".format(message=message) + body = get_commits_comment_action_message( + user_name=get_user_name(payload), + action=action, + commit_url=commit_url, + sha=sha, + message=message + ) return [{"subject": subject, "body": body}] def repo_forked_handler(payload: Dict[str, Any]) -> List[Dict[str, str]]: @@ -37,7 +65,7 @@ def repo_forked_handler(payload: Dict[str, Any]) -> List[Dict[str, str]]: subject = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo_name) body = BITBUCKET_FORK_BODY.format( display_name=payload["actor"]["displayName"], - 
username=payload["actor"]["name"], + username=get_user_name(payload), fork_name=payload["repository"]["name"], fork_url=payload["repository"]["links"]["self"][0]["href"] ) @@ -46,7 +74,7 @@ def repo_forked_handler(payload: Dict[str, Any]) -> List[Dict[str, str]]: def repo_modified_handler(payload: Dict[str, Any]) -> List[Dict[str, str]]: subject_new = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=payload["new"]["name"]) body = BITBUCKET_REPO_UPDATED_CHANGED.format( - actor=payload["actor"]["name"], + actor=get_user_name(payload), change="name", repo_name=payload["old"]["name"], old=payload["old"]["name"], @@ -57,16 +85,22 @@ def repo_modified_handler(payload: Dict[str, Any]) -> List[Dict[str, str]]: def repo_push_branch_data(payload: Dict[str, Any], change: Dict[str, Any]) -> Dict[str, str]: event_type = change["type"] repo_name = payload["repository"]["name"] - user_name = payload["actor"]["name"] + user_name = get_user_name(payload) branch_name = change["ref"]["displayId"] branch_head = change["toHash"] if event_type == "ADD": - body = get_create_branch_event_message(user_name=user_name, url=None, branch_name=branch_name) + body = get_create_branch_event_message( + user_name=user_name, + url=None, + branch_name=branch_name + ) elif event_type == "UPDATE": - body = BRANCH_UPDATED_MESSAGE_TEMPLATE.format(user_name=user_name, - branch_name=branch_name, - head=branch_head) + body = BRANCH_UPDATED_MESSAGE_TEMPLATE.format( + user_name=user_name, + branch_name=branch_name, + head=branch_head + ) elif event_type == "DELETE": body = get_remove_branch_event_message(user_name, branch_name) else: @@ -80,7 +114,6 @@ def repo_push_tag_data(payload: Dict[str, Any], change: Dict[str, Any]) -> Dict[ event_type = change["type"] repo_name = payload["repository"]["name"] tag_name = change["ref"]["displayId"] - user_name = payload["actor"]["name"] if event_type == "ADD": action = "pushed" @@ -91,13 +124,11 @@ def repo_push_tag_data(payload: Dict[str, Any], change: Dict[str, Any]) -> Dict[ raise UnexpectedWebhookEventType("BitBucket Server", message) subject = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo_name) - body = get_push_tag_event_message( - user_name, - tag_name, - action=action) + body = get_push_tag_event_message(get_user_name(payload), tag_name, action=action) return {"subject": subject, "body": body} -def repo_push_handler(payload: Dict[str, Any], branches: Optional[str]=None) -> List[Dict[str, str]]: +def repo_push_handler(payload: Dict[str, Any], branches: Optional[str]=None + ) -> List[Dict[str, str]]: data = [] for change in payload["changes"]: event_target_type = change["ref"]["type"] @@ -114,6 +145,152 @@ def repo_push_handler(payload: Dict[str, Any], branches: Optional[str]=None) -> raise UnexpectedWebhookEventType("BitBucket Server", message) return data +def get_assignees_string(pr: Dict[str, Any]) -> Optional[str]: + reviewers = [] + for reviewer in pr["reviewers"]: + name = reviewer["user"]["name"] + link = reviewer["user"]["links"]["self"][0]["href"] + reviewers.append("[%s](%s)" % (name, link)) + if len(reviewers) == 0: + assignees = None + elif len(reviewers) == 1: + assignees = reviewers[0] + else: + assignees = ", ".join(reviewers[:-1]) + " and " + reviewers[-1] + return assignees + +def get_pr_subject(repo: str, type: str, id: str, title: str) -> str: + return TOPIC_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(repo=repo, type=type, id=id, title=title) + +def get_simple_pr_body(payload: Dict[str, Any], action: str, include_title: Optional[bool]) -> str: + pr = 
payload["pullRequest"] + return get_pull_request_event_message( + user_name=get_user_name(payload), + action=action, + url=pr["links"]["self"][0]["href"], + number=pr["id"], + title=pr["title"] if include_title else None + ) + +def get_pr_opened_or_modified_body(payload: Dict[str, Any], action: str, + include_title: Optional[bool]) -> str: + pr = payload["pullRequest"] + description = pr.get("description") + assignees_string = get_assignees_string(pr) + if assignees_string: + # Then use the custom message template for this particular integration so that we can + # specify the reviewers at the end of the message (but before the description/message). + parameters = {"user_name": get_user_name(payload), + "action": action, + "url": pr["links"]["self"][0]["href"], + "number": pr["id"], + "source": pr["fromRef"]["displayId"], + "destination": pr["toRef"]["displayId"], + "message": description, + "assignees": assignees_string, + "title": pr["title"] if include_title else None} + if include_title: + body = PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS_WITH_TITLE.format( + **parameters + ) + else: + body = PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS.format(**parameters) + if description: + body += '\n' + CONTENT_MESSAGE_TEMPLATE.format(message=description) + return body + return get_pull_request_event_message( + user_name=get_user_name(payload), + action=action, + url=pr["links"]["self"][0]["href"], + number=pr["id"], + target_branch=pr["fromRef"]["displayId"], + base_branch=pr["toRef"]["displayId"], + message=pr.get("description"), + assignee=assignees_string if assignees_string else None, + title=pr["title"] if include_title else None + ) + +def get_pr_needs_work_body(payload: Dict[str, Any], include_title: Optional[bool]) -> str: + pr = payload["pullRequest"] + if not include_title: + return PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE.format( + user_name=get_user_name(payload), + number=pr["id"], + url=pr["links"]["self"][0]["href"] + ) + return PULL_REQUEST_MARKED_AS_NEEDS_WORK_TEMPLATE_WITH_TITLE.format( + user_name=get_user_name(payload), + number=pr["id"], + url=pr["links"]["self"][0]["href"], + title=pr["title"] + ) + +def get_pr_reassigned_body(payload: Dict[str, Any], include_title: Optional[bool]) -> str: + pr = payload["pullRequest"] + assignees_string = get_assignees_string(pr) + if not assignees_string: + if not include_title: + return PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE.format( + user_name=get_user_name(payload), + number=pr["id"], + url=pr["links"]["self"][0]["href"] + ) + return PULL_REQUEST_REASSIGNED_TO_NONE_TEMPLATE_WITH_TITLE.format( + user_name=get_user_name(payload), + number=pr["id"], + url=pr["links"]["self"][0]["href"], + title=pr["title"] + ) + if not include_title: + return PULL_REQUEST_REASSIGNED_TEMPLATE.format( + user_name=get_user_name(payload), + number=pr["id"], + url=pr["links"]["self"][0]["href"], + assignees=assignees_string + ) + return PULL_REQUEST_REASSIGNED_TEMPLATE_WITH_TITLE.format( + user_name=get_user_name(payload), + number=pr["id"], + url=pr["links"]["self"][0]["href"], + assignees=assignees_string, + title=pr["title"] + ) + +def pr_handler(payload: Dict[str, Any], action: str, + include_title: Optional[bool]=False) -> List[Dict[str, str]]: + pr = payload["pullRequest"] + subject = get_pr_subject(pr["toRef"]["repository"]["name"], type="PR", id=pr["id"], + title=pr["title"]) + if action in ["opened", "modified"]: + body = get_pr_opened_or_modified_body(payload, action, include_title) + elif action == "needs_work": + body = 
get_pr_needs_work_body(payload, include_title) + elif action == "reviewers_updated": + body = get_pr_reassigned_body(payload, include_title) + else: + body = get_simple_pr_body(payload, action, include_title) + + return [{"subject": subject, "body": body}] + +def pr_comment_handler(payload: Dict[str, Any], action: str, + include_title: Optional[bool]=False) -> List[Dict[str, str]]: + pr = payload["pullRequest"] + subject = get_pr_subject(pr["toRef"]["repository"]["name"], type="PR", id=pr["id"], + title=pr["title"]) + message = payload["comment"]["text"] + if action == "deleted their comment on": + message = "~~{message}~~".format(message=message) + body = get_pull_request_event_message( + user_name=get_user_name(payload), + action=action, + url=pr["links"]["self"][0]["href"], + number=pr["id"], + message=message, + title=pr["title"] if include_title else None + ) + + return [{"subject": subject, "body": body}] + EVENT_HANDLER_MAP = { "repo:comment:added": partial(repo_comment_handler, action="commented"), "repo:comment:edited": partial(repo_comment_handler, action="edited their comment"), @@ -121,18 +298,18 @@ def repo_push_handler(payload: Dict[str, Any], branches: Optional[str]=None) -> "repo:forked": repo_forked_handler, "repo:modified": repo_modified_handler, "repo:refs_changed": repo_push_handler, - "pr:comment:added": None, - "pr:comment:edited": None, - "pr:comment:deleted": None, - "pr:declined": None, - "pr:deleted": None, - "pr:merged": None, - "pr:modified": None, - "pr:opened": None, - "pr:reviewer:approved": None, - "pr:reviewer:needs_work": None, - "pr:reviewer:updated": None, - "pr:reviewer:unapproved": None, + "pr:comment:added": partial(pr_comment_handler, action="commented on"), + "pr:comment:edited": partial(pr_comment_handler, action="edited their comment on"), + "pr:comment:deleted": partial(pr_comment_handler, action="deleted their comment on"), + "pr:declined": partial(pr_handler, action="declined"), + "pr:deleted": partial(pr_handler, action="deleted"), + "pr:merged": partial(pr_handler, action="merged"), + "pr:modified": partial(pr_handler, action="modified"), + "pr:opened": partial(pr_handler, action="opened"), + "pr:reviewer:approved": partial(pr_handler, action="approved"), + "pr:reviewer:needs_work": partial(pr_handler, action="needs_work"), + "pr:reviewer:updated": partial(pr_handler, action="reviewers_updated"), + "pr:reviewer:unapproved": partial(pr_handler, action="unapproved"), } # type Dict[str, Optional[Callable[..., List[Dict[str, str]]]]] def get_event_handler(eventkey: str) -> Callable[..., List[Dict[str, str]]]: @@ -154,6 +331,8 @@ def api_bitbucket3_webhook(request: HttpRequest, user_profile: UserProfile, if "branches" in signature(handler).parameters: data = handler(payload, branches) + elif "include_title" in signature(handler).parameters: + data = handler(payload, include_title=user_specified_topic) else: data = handler(payload) for element in data:
diff --git a/zerver/webhooks/bitbucket3/tests.py b/zerver/webhooks/bitbucket3/tests.py --- a/zerver/webhooks/bitbucket3/tests.py +++ b/zerver/webhooks/bitbucket3/tests.py @@ -8,80 +8,71 @@ class Bitbucket3HookTests(WebhookTestCase): EXPECTED_TOPIC = "sandbox" EXPECTED_TOPIC_BRANCH_EVENTS = "sandbox / {branch}" + # Core Repo Events: def test_commit_comment_added(self) -> None: - expected_message = """hypro999 commented on [508d1b6](http://139.59.64.214:7990/projects\ -/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894aa9907)\n~~~ quote\nJust an \ -arbitrary comment on a commit.\n~~~""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) commented on [508d1b6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894aa9907)\n~~~ quote\nJust an arbitrary comment on a commit.\n~~~""" self.send_and_test_stream_message("commit_comment_added", self.EXPECTED_TOPIC, expected_message) def test_commit_comment_edited(self) -> None: - expected_message = """hypro999 edited their comment on [508d1b6](http://139.59.64.214:7990\ -/projects/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894aa9907)\n~~~ quote\nJust \ -an arbitrary comment on a commit. Nothing to see here...\n~~~""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) edited their comment on [508d1b6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894aa9907)\n~~~ quote\nJust an arbitrary comment on a commit. Nothing to see here...\n~~~""" self.send_and_test_stream_message("commit_comment_edited", self.EXPECTED_TOPIC, expected_message) def test_commit_comment_deleted(self) -> None: - expected_message = """hypro999 deleted their comment on [508d1b6]\ -(http://139.59.64.214:7990/projects/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894a\ -a9907)\n~~~ quote\nJust an arbitrary comment on a commit. Nothing to see here...\n~~~""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) deleted their comment on [508d1b6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/commits/508d1b67f1f8f3a25f543a030a7a178894aa9907)\n~~~ quote\n~~Just an arbitrary comment on a commit. Nothing to see here...~~\n~~~""" self.send_and_test_stream_message("commit_comment_deleted", self.EXPECTED_TOPIC, expected_message) def test_bitbucket3_repo_forked(self) -> None: - expected_message = """User Hemanth V. Alluri(login: hypro999) forked the repository into \ -[sandbox fork](http://139.59.64.214:7990/users/hypro999/repos/sandbox-fork/browse).""" + expected_message = """User Hemanth V. 
Alluri(login: [hypro999](http://139.59.64.214:7990/users/hypro999)) forked the repository into [sandbox fork](http://139.59.64.214:7990/users/hypro999/repos/sandbox-fork/browse).""" self.send_and_test_stream_message("repo_forked", self.EXPECTED_TOPIC, expected_message) def test_bitbucket3_repo_modified(self) -> None: - expected_message = """hypro999 changed the name of the **sandbox** repo from **sandbox** \ -to **sandbox v2**""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) changed the name of the **sandbox** repo from **sandbox** to **sandbox v2**""" expected_topic = "sandbox v2" self.send_and_test_stream_message("repo_modified", expected_topic, expected_message) + # Repo Push Events: def test_push_add_branch(self) -> None: - expected_message = """hypro999 created branch2 branch""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) created branch2 branch""" expected_topic = self.EXPECTED_TOPIC_BRANCH_EVENTS.format(branch="branch2") self.send_and_test_stream_message("repo_push_add_branch", expected_topic, expected_message) def test_push_add_tag(self) -> None: - expected_message = """hypro999 pushed tag newtag""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) pushed tag newtag""" self.send_and_test_stream_message("repo_push_add_tag", self.EXPECTED_TOPIC, expected_message) def test_push_delete_branch(self) -> None: - expected_message = """hypro999 deleted branch branch2""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) deleted branch branch2""" expected_topic = self.EXPECTED_TOPIC_BRANCH_EVENTS.format(branch="branch2") self.send_and_test_stream_message("repo_push_delete_branch", expected_topic, expected_message) def test_push_delete_tag(self) -> None: - expected_message = """hypro999 removed tag test-tag""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) removed tag test-tag""" self.send_and_test_stream_message("repo_push_delete_tag", self.EXPECTED_TOPIC, expected_message) def test_push_update_single_branch(self) -> None: - expected_message = """hypro999 pushed to branch master. Head is now \ -e68c981ef53dbab0a5ca320a2d8d80e216c70528""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) pushed to branch master. Head is now e68c981ef53dbab0a5ca320a2d8d80e216c70528.""" expected_topic = self.EXPECTED_TOPIC_BRANCH_EVENTS.format(branch="master") self.send_and_test_stream_message("repo_push_update_single_branch", expected_topic, expected_message) def test_push_update_multiple_branches(self) -> None: - expected_message_first = """hypro999 pushed to branch branch1. Head is now \ -3980c2be32a7e23c795741d5dc1a2eecb9b85d6d""" - expected_message_second = """hypro999 pushed to branch master. Head is now \ -fc43d13cff1abb28631196944ba4fc4ad06a2cf2""" + expected_message_first = """[hypro999](http://139.59.64.214:7990/users/hypro999) pushed to branch branch1. Head is now 3980c2be32a7e23c795741d5dc1a2eecb9b85d6d.""" + expected_message_second = """[hypro999](http://139.59.64.214:7990/users/hypro999) pushed to branch master. Head is now fc43d13cff1abb28631196944ba4fc4ad06a2cf2.""" self.send_and_test_stream_message("repo_push_update_multiple_branches") msg = self.get_last_message() @@ -94,17 +85,187 @@ def test_push_update_multiple_branches(self) -> None: def test_push_update_multiple_branches_with_branch_filter(self) -> None: self.url = self.build_webhook_url(branches='master') - expected_message = """hypro999 pushed to branch master. 
Head is now \ -fc43d13cff1abb28631196944ba4fc4ad06a2cf2""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) pushed to branch master. Head is now fc43d13cff1abb28631196944ba4fc4ad06a2cf2.""" expected_topic = self.EXPECTED_TOPIC_BRANCH_EVENTS.format(branch="master") self.send_and_test_stream_message("repo_push_update_multiple_branches", expected_topic, expected_message) self.url = self.build_webhook_url(branches='branch1') - expected_message = """hypro999 pushed to branch branch1. Head is now \ -3980c2be32a7e23c795741d5dc1a2eecb9b85d6d""" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) pushed to branch branch1. Head is now 3980c2be32a7e23c795741d5dc1a2eecb9b85d6d.""" expected_topic = self.EXPECTED_TOPIC_BRANCH_EVENTS.format(branch="branch1") self.send_and_test_stream_message("repo_push_update_multiple_branches", expected_topic, expected_message) + + # Core PR Events: + def test_pr_opened_without_reviewers(self) -> None: + expected_topic = "sandbox / PR #1 Branch1" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) opened [PR #1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1)\nfrom `branch1` to `master`\n\n~~~ quote\n* Add file2.txt\r\n* Add file3.txt\n~~~""" + self.send_and_test_stream_message("pull_request_opened_without_reviewers", + expected_topic, + expected_message) + + def test_pr_opened_without_description(self) -> None: + expected_topic = "sandbox / PR #2 Add notes feature." + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) opened [PR #2](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/2)\nfrom `master` to `master`""" + self.send_and_test_stream_message("pull_request_opened_without_description", + expected_topic, + expected_message) + + def test_pr_opened_with_two_reviewers(self) -> None: + expected_topic = "sandbox / PR #5 Add Notes Feature" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) opened [PR #5](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/5)\nfrom `master` to `master` (assigned to [shimura](http://139.59.64.214:7990/users/shimura) and [sougo](http://139.59.64.214:7990/users/sougo) for review)""" + self.send_and_test_stream_message("pull_request_opened_with_two_reviewers", + expected_topic, + expected_message) + + def test_pr_opened_with_two_reviewers_and_user_defined_topic(self) -> None: + expected_topic = "sandbox / PR #5 Add Notes Feature" + expected_topic = "custom_topic" + self.url = self.build_webhook_url(topic='custom_topic') + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) opened [PR #5 Add Notes Feature](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/5)\nfrom `master` to `master` (assigned to [shimura](http://139.59.64.214:7990/users/shimura) and [sougo](http://139.59.64.214:7990/users/sougo) for review)""" + self.send_and_test_stream_message("pull_request_opened_with_two_reviewers", + expected_topic, + expected_message) + + def test_pr_opened_with_mulitple_reviewers(self) -> None: + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." 
+ expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) opened [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)\nfrom `master` to `master` (assigned to [sougo](http://139.59.64.214:7990/users/sougo), [zura](http://139.59.64.214:7990/users/zura) and [shimura](http://139.59.64.214:7990/users/shimura) for review)\n\n~~~ quote\nAdd a simple text file for further testing purposes.\n~~~""" + self.send_and_test_stream_message("pull_request_opened_with_multiple_reviewers", + expected_topic, + expected_message) + + def test_pr_modified(self) -> None: + expected_topic = "sandbox / PR #1 Branch1" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) modified [PR #1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1)\nfrom `branch1` to `master` (assigned to [shimura](http://139.59.64.214:7990/users/shimura) for review)\n\n~~~ quote\n* Add file2.txt\n* Add file3.txt\nBoth of these files would be important additions to the project!\n~~~""" + self.send_and_test_stream_message("pull_request_modified", + expected_topic, + expected_message) + + def test_pr_modified_with_include_title(self) -> None: + expected_topic = "custom_topic" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) modified [PR #1 Branch1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1)\nfrom `branch1` to `master` (assigned to [shimura](http://139.59.64.214:7990/users/shimura) for review)\n\n~~~ quote\n* Add file2.txt\n* Add file3.txt\nBoth of these files would be important additions to the project!\n~~~""" + self.url = self.build_webhook_url(topic='custom_topic') + self.send_and_test_stream_message("pull_request_modified", + expected_topic, + expected_message) + + def test_pr_deleted(self) -> None: + expected_topic = "sandbox / PR #2 Add notes feature." + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) deleted [PR #2](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/2)""" + self.send_and_test_stream_message("pull_request_deleted", + expected_topic, + expected_message) + + def test_pr_deleted_with_include_title(self) -> None: + expected_topic = "custom_topic" + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) deleted [PR #2 Add notes feature.](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/2)""" + self.url = self.build_webhook_url(topic='custom_topic') + self.send_and_test_stream_message("pull_request_deleted", + expected_topic, + expected_message) + + def test_pr_declined(self) -> None: + expected_topic = "sandbox / PR #7 Crazy Idea" + expected_message = """[zura](http://139.59.64.214:7990/users/zura) declined [PR #7](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/7)""" + self.send_and_test_stream_message("pull_request_declined", + expected_topic, + expected_message) + + def test_pr_merged(self) -> None: + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." + expected_message = """[zura](http://139.59.64.214:7990/users/zura) merged [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)""" + self.send_and_test_stream_message("pull_request_merged", + expected_topic, + expected_message) + + # PR Reviewer Events: + def test_pr_approved(self) -> None: + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." 
+ expected_message = """[zura](http://139.59.64.214:7990/users/zura) approved [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)""" + self.send_and_test_stream_message("pull_request_approved", + expected_topic, + expected_message) + + def test_pr_unapproved(self) -> None: + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." + expected_message = """[zura](http://139.59.64.214:7990/users/zura) unapproved [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)""" + self.send_and_test_stream_message("pull_request_unapproved", + expected_topic, + expected_message) + + def test_pr_marked_as_needs_review(self) -> None: + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." + expected_message = """[zura](http://139.59.64.214:7990/users/zura) marked [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6) as \"needs work\"""" + self.send_and_test_stream_message("pull_request_needs_work", + expected_topic, + expected_message) + + def test_pr_marked_as_needs_review_and_include_title(self) -> None: + expected_topic = "custom_topic" + expected_message = """[zura](http://139.59.64.214:7990/users/zura) marked [PR #6 sample_file: Add sample_file.txt.](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6) as \"needs work\"""" + self.url = self.build_webhook_url(topic='custom_topic') + self.send_and_test_stream_message("pull_request_needs_work", + expected_topic, + expected_message) + + def test_pull_request_reviewer_added(self) -> None: + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) reassigned [PR #1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1) to [shimura](http://139.59.64.214:7990/users/shimura)""" + expected_topic = "sandbox / PR #1 Branch1" + self.send_and_test_stream_message("pull_request_add_reviewer", + expected_topic, + expected_message) + + def test_pull_request_reviewer_added_and_include_title(self) -> None: + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) reassigned [PR #1 Branch1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1) to [shimura](http://139.59.64.214:7990/users/shimura)""" + expected_topic = "custom_topic" + self.url = self.build_webhook_url(topic='custom_topic') + self.send_and_test_stream_message("pull_request_add_reviewer", + expected_topic, + expected_message) + + def test_pull_request_reviewers_added(self) -> None: + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) reassigned [PR #1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1) to [shimura](http://139.59.64.214:7990/users/shimura) and [sougo](http://139.59.64.214:7990/users/sougo)""" + expected_topic = "sandbox / PR #1 Branch1" + self.send_and_test_stream_message("pull_request_add_two_reviewers", + expected_topic, + expected_message) + + def test_pull_request_remove_all_reviewers(self) -> None: + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) removed all reviewers from [PR #1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1)""" + expected_topic = "sandbox / PR #1 Branch1" + self.send_and_test_stream_message("pull_request_remove_reviewer", + expected_topic, + expected_message) + + def test_pull_request_remove_all_reviewers_with_title(self) -> None: + expected_message = """[hypro999](http://139.59.64.214:7990/users/hypro999) removed all reviewers from [PR #1 
Branch1](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/1)""" + expected_topic = "sandbox / PR #1 Branch1" + expected_topic = "custom_topic" + self.url = self.build_webhook_url(topic='custom_topic') + self.send_and_test_stream_message("pull_request_remove_reviewer", + expected_topic, + expected_message) + + # PR Comment Events: + def test_pull_request_comment_added(self) -> None: + expected_message = """[zura](http://139.59.64.214:7990/users/zura) commented on [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)\n\n~~~ quote\nThis seems like a pretty good idea.\n~~~""" + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." + self.send_and_test_stream_message("pull_request_comment_added", + expected_topic, + expected_message) + + def test_pull_request_comment_edited(self) -> None: + expected_message = """[zura](http://139.59.64.214:7990/users/zura) edited their comment on [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)\n\n~~~ quote\nThis seems like a pretty good idea. @shimura what do you think?\n~~~""" + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." + self.send_and_test_stream_message("pull_request_comment_edited", + expected_topic, + expected_message) + + def test_pull_request_comment_deleted(self) -> None: + expected_message = """[zura](http://139.59.64.214:7990/users/zura) deleted their comment on [PR #6](http://139.59.64.214:7990/projects/SBOX/repos/sandbox/pull-requests/6)\n\n~~~ quote\n~~This seems like a pretty good idea. @shimura what do you think?~~\n~~~""" + expected_topic = "sandbox / PR #6 sample_file: Add sample_file.txt." + self.send_and_test_stream_message("pull_request_comment_deleted", + expected_topic, + expected_message)
Bitbucket Server: Complete Support and Add Enhancements

**Contents:** This pull request consists of two commits.

1. Complete the Bitbucket Server (Bitbucket 3) incoming webhook integration by extending support for all pull request events.
2. Enhance and refactor the code. From the commit message: "In terms of minor enhancements, this commit will now link each actor's name to the URL of their profile, giving the messages a much more complete feel. Also, when comments on a commit are deleted, the quote will now use strikethrough text. In terms of refactoring, this commit removes the unnecessary line splitting in this integration's tests (mainly observed with the expected_message variables), making them easier to read."

**Testing Plan:** A plethora of new automated tests have been added, and additional testing has been done manually. Since this is quite a bit of code to review all at once, I've written a small script that makes viewing the output more convenient by generating all of the fixture messages in one go, so that they can easily be inspected from the development environment, i.e. after running `./tools/run-dev.py` (see attachment: [visual-test.zip](https://github.com/zulip/zulip/files/3009417/visual-test.zip)). A few "macros" (just variables) need to be set manually, and the script can then be run from the top-level zulip directory.
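The attached visual-test script is not reproduced here, but a rough sketch of the same idea, replaying every fixture payload against a local development server so the rendered messages can be inspected in the web app, could look like the following. The fixture directory, integration endpoint name, bot API key, and stream name are assumptions to adjust for your setup; this is an illustration, not the actual attachment.

```python
import json
import os

import requests

# Assumed paths/credentials; adjust for your environment.
FIXTURE_DIR = "zerver/webhooks/bitbucket3/fixtures"
BOT_API_KEY = "<bot-api-key>"
WEBHOOK_URL = ("http://localhost:9991/api/v1/external/bitbucket3"
               "?api_key={}&stream=test".format(BOT_API_KEY))

for name in sorted(os.listdir(FIXTURE_DIR)):
    if not name.endswith(".json"):
        continue
    with open(os.path.join(FIXTURE_DIR, name)) as f:
        payload = json.load(f)
    # Post each fixture as if Bitbucket Server had sent the webhook.
    response = requests.post(WEBHOOK_URL, json=payload)
    print("{}: {}".format(name, response.status_code))
```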
2019-03-26T17:29:49
zulip/zulip
12,147
zulip__zulip-12147
[ "12030" ]
d67dc2eb9401fc9f08b9a007325fbfadd8fe81ca
diff --git a/scripts/lib/setup_path_on_import.py b/scripts/lib/setup_path_on_import.py --- a/scripts/lib/setup_path_on_import.py +++ b/scripts/lib/setup_path_on_import.py @@ -13,6 +13,6 @@ # this file will exist in production if os.path.exists(activate_this): activate_locals = dict(__file__=activate_this) - exec(open(activate_this).read(), {}, activate_locals) + exec(open(activate_this).read(), activate_locals) if not os.path.exists(activate_locals["site_packages"]): raise RuntimeError(venv + " was not set up for this Python version") diff --git a/tools/lib/provision.py b/tools/lib/provision.py --- a/tools/lib/provision.py +++ b/tools/lib/provision.py @@ -421,7 +421,7 @@ def main(options): setup_venvs.main() activate_this = "/srv/zulip-py3-venv/bin/activate_this.py" - exec(open(activate_this).read(), {}, dict(__file__=activate_this)) + exec(open(activate_this).read(), dict(__file__=activate_this)) setup_shell_profile('~/.bash_profile') setup_shell_profile('~/.zprofile')
Can't upgrade from 1.8.1 to any other version

Hello, we currently have Zulip 1.8.1 running on an Ubuntu 16.04.6 LTS server. We wanted to upgrade to benefit from some new features but find we are unable to do so.

Environment:
- Zulip 1.8.1 running on Ubuntu 16.04.6 LTS, which is running on an ESXi 6.0 hypervisor
- Every connection to websites has to be made through a Squid proxy

The upgrade via the command "/home/zulip/deployments/current/scripts/upgrade-zulip /usr/local/src/zulip-server-2.0.2.tar.gz" hangs at the step "Installing setuptools, pip, wheel". We suspected our proxy was at fault (even though the environment variables were set) and bypassed it by tethering over a mobile phone. Everything then seemed to run well until it reached the point where it executes the script "/home/zulip/deployments/2019-04-02-12-21-11/scripts/setup/generate_secrets.py", at which point the upgrade aborts with the error "NameError: name 'prev' is not defined". I attached a screenshot. Can anybody help fix this issue?

Thanks
Jan

![grafik](https://user-images.githubusercontent.com/49191973/55395785-9820ce00-5542-11e9-9ee8-6b091e018966.png)
@clk-jkn the first issue is definitely a proxy/networking issue, glad you were able to figure that out. Does this `prev` error happen reproducibly if you try multiple times? The issue with `prev` I've never seen before, and I'm puzzled by, because I don't think the tool in question has an identifier called `prev`. @andersk do you have any idea what could cause this or how to debug further? Hello @zulip/server-production members, this issue was labeled with the "area: production" label, so you may want to check it out! <!-- areaLabelAddition --> It does happen reproducibly. Even with different versions. We tried upgrading to 1.9.0 because we thought the error might be related to the jump from 1.8.1 to 2.0.2, but even then it failed with the same exact error. I looked into the script but I am no programer and couldn't find anything obvious with my limited python knowledge. Which is why I asked for help here. I just can't seem to figure this one out on my own. Thanks for helping! Can you provide the content of `/var/log/zulip/upgrade.log`? It might have details that help investigate this. The `NameError` is complaining about the `prev` variable in `activate_this.py`. Some versions of `activate_this.py` do use `prev`, but they also define it as expected, so the `NameError` is strange indeed. Can you paste in the contents of `/home/zulip/deployments/2019-04-02-12-21-11/zulip-py3-venv/bin/activate_this.py`? Sure, no Problem. Contents of activate_this.py: ``` """Activate virtualenv for current interpreter: Use exec(open(this_file).read(), {'__file__': this_file}). This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. """ import os import site import sys try: __file__ except NameError: raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))") # prepend bin to PATH (this file is inside the bin directory) bin_dir = os.path.dirname(os.path.abspath(__file__)) os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep)) base = os.path.dirname(bin_dir) # virtual env is right above bin directory os.environ["VIRTUAL_ENV"] = base # add the virtual environments site-package to the host python import mechanism IS_PYPY = hasattr(sys, "pypy_version_info") IS_JYTHON = sys.platform.startswith("java") if IS_JYTHON: site_packages = os.path.join(base, "Lib", "site-packages") elif IS_PYPY: site_packages = os.path.join(base, "site-packages") else: IS_WIN = sys.platform == "win32" if IS_WIN: site_packages = os.path.join(base, "Lib", "site-packages") else: site_packages = os.path.join(base, "lib", "python{}".format(sys.version[:3]), "site-packages") prev = set(sys.path) site.addsitedir(site_packages) sys.real_prefix = sys.prefix sys.prefix = base # Move the added items to the front of the path, in place new = list(sys.path) sys.path[:] = [i for i in new if i not in prev] + [i for i in new if i in prev] ``` I re-executed the upgrade to give you a good clean upgrade.log. I attached it, as it is rather long thanks to apt... Once again thank you for helping. [upgrade.log](https://github.com/zulip/zulip/files/3052972/upgrade.log)
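For readers puzzled by the traceback above, here is a minimal, standalone reproduction of the underlying Python behavior, independent of Zulip and virtualenv. When `exec` is given separate globals and locals dictionaries, top-level assignments go into the locals dict, but a list comprehension is compiled as a hidden nested function (on Python up to 3.11) that looks free names up in globals, so `prev` appears undefined right after being assigned. Passing a single dictionary, as the patch above does, avoids this.

```python
snippet = """
prev = {1, 2, 3}
new = [1, 2, 3, 4]
result = [i for i in new if i not in prev]
"""

# Old call style: separate globals and locals dicts.
# The assignment to `prev` lands in the locals dict, but the comprehension
# looks `prev` up in the (empty) globals dict, so this raises NameError
# on Python versions up to 3.11.
try:
    exec(snippet, {}, {})
except NameError as error:
    print("separate dicts:", error)

# Fixed call style: a single dict serves as both globals and locals,
# so the lookup succeeds.
namespace = {}
exec(snippet, namespace)
print("single dict:", namespace["result"])  # [4]
```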
2019-04-16T23:32:00
zulip/zulip
12,231
zulip__zulip-12231
[ "12199" ]
bc9d7141b2ab8c914e91318258f5e72aed3be3f0
diff --git a/zerver/context_processors.py b/zerver/context_processors.py --- a/zerver/context_processors.py +++ b/zerver/context_processors.py @@ -133,6 +133,7 @@ def zulip_default_context(request: HttpRequest) -> Dict[str, Any]: 'allow_search_engine_indexing': allow_search_engine_indexing, } + context['OPEN_GRAPH_URL'] = '%s%s' % (realm_uri, request.path) if realm is not None and realm.icon_source == realm.ICON_UPLOADED: context['OPEN_GRAPH_IMAGE'] = '%s%s' % (realm_uri, realm_icon)
diff --git a/zerver/tests/test_middleware.py b/zerver/tests/test_middleware.py --- a/zerver/tests/test_middleware.py +++ b/zerver/tests/test_middleware.py @@ -184,3 +184,13 @@ def test_login_page_realm_icon(self) -> None: twitter_image = bs.select_one('meta[name="twitter:image"]').get('content') self.assertTrue(open_graph_image.endswith(realm_icon)) self.assertTrue(twitter_image.endswith(realm_icon)) + + def test_no_realm_api_page_og_url(self) -> None: + response = self.client_get('/api/', subdomain='') + self.assertEqual(response.status_code, 200) + + decoded = response.content.decode('utf-8') + bs = BeautifulSoup(decoded, features='lxml') + open_graph_url = bs.select_one('meta[property="og:url"]').get('content') + + self.assertTrue(open_graph_url.endswith('/api/'))
Open Graph og:url tags over-canonicalize all URLs back to the home page Split from #12187: > By the way, it turns out all our OpenGraph tags are [busted anyway](https://developers.facebook.com/tools/debug/sharing/?q=https%3A%2F%2Fchat.zulip.org%2Fapi%2Fincoming-webhooks-walkthrough) because we always set `og:url` to point to the home page (which redirects to /login, whose `og:url` points back to the home page).
Hello @zulip/server-misc members, this issue was labeled with the "area: portico" label, so you may want to check it out! <!-- areaLabelAddition --> I think we either want to delete our `og:url` tags (if that DTRT) or specify the current page's address (I think relative links don't work?); I'm not sure we have a use case for pointing to a different canonical URL from the current one right now. @punchagan this may be worth looking at while you're working on open graph stuff. @zulipbot claim
2019-04-29T13:55:55
zulip/zulip
12,240
zulip__zulip-12240
[ "12228" ]
edb956091f86e973a5b4613fba2898969a91a86d
diff --git a/zerver/lib/html_to_text.py b/zerver/lib/html_to_text.py --- a/zerver/lib/html_to_text.py +++ b/zerver/lib/html_to_text.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Dict, Optional from bs4 import BeautifulSoup from django.http import HttpRequest @@ -6,7 +6,7 @@ from zerver.lib.cache import cache_with_key, open_graph_description_cache_key -def html_to_text(content: str, tags: Optional[List[str]]=None) -> str: +def html_to_text(content: str, tags: Optional[Dict[str, str]]=None) -> str: bs = BeautifulSoup(content, features='lxml') # Skip any admonition (warning) blocks, since they're # usually something about users needing to be an @@ -21,10 +21,15 @@ def html_to_text(content: str, tags: Optional[List[str]]=None) -> str: text = '' if tags is None: - tags = ['p'] - for paragraph in bs.find_all(tags): + tags = {'p': ' | '} + for element in bs.find_all(tags.keys()): + # Ignore empty elements + if not element.text: + continue # .text converts it from HTML to text - text = text + paragraph.text + ' ' + if text: + text += tags[element.name] + text += element.text if len(text) > 500: break return escape(' '.join(text.split())) diff --git a/zerver/lib/realm_description.py b/zerver/lib/realm_description.py --- a/zerver/lib/realm_description.py +++ b/zerver/lib/realm_description.py @@ -13,4 +13,4 @@ def get_realm_rendered_description(realm: Realm) -> str: @cache_with_key(realm_text_description_cache_key, timeout=3600*24*7) def get_realm_text_description(realm: Realm) -> str: html_description = get_realm_rendered_description(realm) - return html_to_text(html_description, ['p', 'li']) + return html_to_text(html_description, {'p': ' | ', 'li': ' * '})
diff --git a/zerver/tests/test_middleware.py b/zerver/tests/test_middleware.py --- a/zerver/tests/test_middleware.py +++ b/zerver/tests/test_middleware.py @@ -83,7 +83,7 @@ def test_admonition_and_link(self) -> None: '/help/disable-message-edit-history', "Disable message edit history (Zulip Help Center)", ["By default, Zulip displays messages", - "users can view the edit history of a message. To remove the", + "users can view the edit history of a message. | To remove the", "best to delete the message entirely. "], ["Disable message edit history", "feature is only available", "Related articles", "Restrict message editing"] @@ -104,7 +104,7 @@ def test_settings_tab(self) -> None: self.check_title_and_description( '/help/deactivate-your-account', "Deactivate your account (Zulip Help Center)", - ["Any bots that you maintain will be disabled. Deactivating "], + ["Any bots that you maintain will be disabled. | Deactivating "], ["Confirm by clicking", " ", "\n"]) def test_tabs(self) -> None: @@ -135,7 +135,7 @@ def test_nonexistent_page(self) -> None: '/help/not-a-real-page', # Probably we should make this "Zulip Help Center" "No such article. (Zulip Help Center)", - ["No such article. We're here to help!", + ["No such article. | We're here to help!", "Email us at [email protected] with questions, feedback, or feature requests."], [], # Test that our open graph logic doesn't throw a 500 @@ -166,7 +166,7 @@ def test_login_page_markdown_description(self) -> None: '/login/', 'Zulip Dev', ['Welcome to Clojurians Zulip - the place where the Clojure community meets', - 'note-1', 'note-2', 'note-3', 'Enjoy!'], + '* note-1 * note-2 * note-3 | Enjoy!'], []) def test_login_page_realm_icon(self) -> None:
Add delimiter between paragraphs and bullets in open graph descriptions

Split from #12178

> I think we might want to consider making bulleted lists have some sort of delimiter between the bullets (see the Slack preview, which is long enough to run into this, above). Possibly just converting those back into * might be a good idea. Maybe new paragraph into |, and bullets into *?
@zulipbot claim @rishig @timabbott The text for czo would read something like this, with the `|` and `*` as delimiters. > Welcome to the Zulip development and user community! | Join to get a quick Zulip demo, observe a Zulip community, offer feedback to the Zulip core team, or get involved in as a contributor. * Community conventions * Code of Conduct | Note that this server runs a bleeding-edge version of Zulip, so you may encounter bugs. Please report them! Does this seem ok to you?
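To make the proposed output concrete, here is a trimmed-down sketch of the patched helper (it omits the HTML escaping, caching, and 500-character cutoff of the real `html_to_text`), showing how the per-tag delimiters get applied:

```python
from bs4 import BeautifulSoup

def html_to_text_sketch(content, tags):
    # Join the text of matching elements, using a delimiter chosen per tag,
    # mirroring the logic added to zerver/lib/html_to_text.py.
    bs = BeautifulSoup(content, features='lxml')
    text = ''
    for element in bs.find_all(tags.keys()):
        if not element.text:  # skip empty elements
            continue
        if text:
            text += tags[element.name]
        text += element.text
    return ' '.join(text.split())

html = '<p>Welcome!</p><ul><li>note-1</li><li>note-2</li></ul><p>Enjoy!</p>'
print(html_to_text_sketch(html, {'p': ' | ', 'li': ' * '}))
# Welcome! * note-1 * note-2 | Enjoy!
```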
2019-05-01T02:40:13
zulip/zulip
12,319
zulip__zulip-12319
[ "8562" ]
b38ae6e0edf2661dddb02afd06104f38a1dbc8b9
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -677,16 +677,21 @@ def youtube_id(self, url: str) -> Optional[str]: if not self.markdown.image_preview_enabled: return None # Youtube video id extraction regular expression from http://pastebin.com/KyKAFv1s - # Slightly modified to support URLs of the form youtu.be/<id> + # Slightly modified to support URLs of the forms + # - youtu.be/<id> + # - youtube.com/playlist?v=<id>&list=<list-id> + # - youtube.com/watch_videos?video_ids=<id1>,<id2>,<id3> # If it matches, match.group(2) is the video id. schema_re = r'(?:https?://)' host_re = r'(?:youtu\.be/|(?:\w+\.)?youtube(?:-nocookie)?\.com/)' - param_re = r'(?:(?:(?:v|embed)/)|(?:(?:watch(?:_popup)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v=))' + param_re = r'(?:(?:(?:v|embed)/)|' + \ + r'(?:(?:(?:watch|playlist)(?:_popup|_videos)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v(?:ideo_ids)?=))' id_re = r'([0-9A-Za-z_-]+)' youtube_re = r'^({schema_re}?{host_re}{param_re}?)?{id_re}(?(1).+)?$' youtube_re = youtube_re.format(schema_re=schema_re, host_re=host_re, id_re=id_re, param_re=param_re) match = re.match(youtube_re, url) - if match is None: + # URLs of the form youtube.com/playlist?list=<list-id> are incorrectly matched + if match is None or match.group(2) == 'playlist': return None return match.group(2)
diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -360,6 +360,21 @@ def test_inline_youtube(self) -> None: self.assertEqual(converted, '<p><a href="https://youtu.be/hx1mjT73xYE" target="_blank" title="https://youtu.be/hx1mjT73xYE">https://youtu.be/hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="https://youtu.be/hx1mjT73xYE" target="_blank" title="https://youtu.be/hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') + msg = 'https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo' + not_converted = bugdown_convert(msg) + + self.assertEqual(not_converted, '<p><a href="https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo" target="_blank" title="https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo">https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo</a></p>') + + msg = 'https://www.youtube.com/playlist?v=O5nskjZ_GoI&list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo' + converted = bugdown_convert(msg) + + self.assertEqual(converted, '<p><a href="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo" target="_blank" title="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo">https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo</a></p>\n<div class="youtube-video message_inline_image"><a data-id="O5nskjZ_GoI" href="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo" target="_blank" title="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo"><img src="https://i.ytimg.com/vi/O5nskjZ_GoI/default.jpg"></a></div>') + + msg = 'http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw' + converted = bugdown_convert(msg) + + self.assertEqual(converted, '<p><a href="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw" target="_blank" title="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw">http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw</a></p>\n<div class="youtube-video message_inline_image"><a data-id="nOJgD4fcZhI" href="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw" target="_blank" title="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw"><img src="https://i.ytimg.com/vi/nOJgD4fcZhI/default.jpg"></a></div>') + def test_inline_vimeo(self) -> None: msg = 'Check out the debate: https://vimeo.com/246979354' converted = bugdown_convert(msg)
Support for YouTube playlist embeds

Some YouTube playlist URLs are not recognized by Zulip for embedding.

Works:
https://www.youtube.com/watch?v=OPf0YbXqDm0&list=PLMC9KNkIncKtPzgY-5rmhvj7fax8fdxoj

Doesn't work:
https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo
https://youtu.be/fJXwerDHg18?list=PL30nYyn47z1Clxr8tSpbRagFu66nrk32x
Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out! <!-- areaLabelNotification --> I would like to also add that youtube's shortened versions do not get recognized by zulip. Ex: https://youtu.be/gjDrEdEzfQc vs https://www.youtube.com/watch?v=gjDrEdEzfQc&feature=youtu.be @zulipbot claim Not supporting the shortened code is in particular, annoying, as most youtube links will come in this form now if you take straight from youtube... @Yoshi8765 hmm, I tried every single link above and it seemed to be embedded in Zulip every time. Maybe just a settings issue on your realm? I attached the screenshot below of all the links + their thumbnails which seemed to work fine. ![screen shot 2018-03-10 at 12 13 31 pm](https://user-images.githubusercontent.com/12992571/37246345-98bbdfb6-245c-11e8-87f1-c29137ca6153.png) @lonerz I just retried it in my realm. Each are being posted as separate posts. ![](https://i.imgur.com/tp2IaGR.png) I've look at my realm (organization) settings, but I can't see anything that might be affecting this. Is there anything I can provide? If you tried to test that in chat.zulip.org I believe the platform there is a bleeding edge version which is different from what other realms are on. @Yoshi8765 I tried this on my dev server off the current master. It seems like chat.zulip.org has the same issues you are facing. @timabbott is there a difference between the markdown link parsing on chat.zulip.org and the dev server? @lonerz yes, in the development environment, we have the `INLINE_URL_EMBED_PREVIEW` setting on; it's off by default in production (but can be toggled). Hello @lonerz, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon!
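For reference, the URL shapes discussed in this thread can be checked against the patched pattern with a standalone snippet that assembles the same regular expression as the updated `youtube_id` helper. The wrapper function below is a simplified stand-in for the real method, which additionally checks whether image previews are enabled for the realm.

```python
import re

# Pieces copied from zerver/lib/bugdown/__init__.py after this change.
schema_re = r'(?:https?://)'
host_re = r'(?:youtu\.be/|(?:\w+\.)?youtube(?:-nocookie)?\.com/)'
param_re = (r'(?:(?:(?:v|embed)/)|'
            r'(?:(?:(?:watch|playlist)(?:_popup|_videos)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v(?:ideo_ids)?=))')
id_re = r'([0-9A-Za-z_-]+)'
youtube_re = r'^({schema_re}?{host_re}{param_re}?)?{id_re}(?(1).+)?$'.format(
    schema_re=schema_re, host_re=host_re, id_re=id_re, param_re=param_re)

def youtube_id(url):
    match = re.match(youtube_re, url)
    # Bare playlist URLs (no v= parameter) would capture the literal word
    # "playlist" as the id, so they are rejected explicitly.
    if match is None or match.group(2) == 'playlist':
        return None
    return match.group(2)

print(youtube_id('https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo'))
# None (playlist with no specific video)
print(youtube_id('https://www.youtube.com/playlist?v=O5nskjZ_GoI&list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo'))
# O5nskjZ_GoI
print(youtube_id('http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw'))
# nOJgD4fcZhI
```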
2019-05-12T10:15:45
zulip/zulip
12,366
zulip__zulip-12366
[ "12347" ]
1353e94b29c25e53740c51d51268f0b73bfd0e83
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -11,4 +11,4 @@ # Typically, adding a dependency only requires a minor version bump, and # removing a dependency requires a major version bump. -PROVISION_VERSION = '32.0' +PROVISION_VERSION = '32.1'
Scrollbar drag can result in unintended click actions Split off from #11792: > * on the settings pages, if you click on the scrollbar, drag it down, and then release your click when the mouse is outside the settings modal (e.g. below it or to the right), it closes the settings modal. I don't know if this is an existing thing or a regression, but I ran into it a bunch of times when testing even after knowing the behavior. This was not a regression from perfect-scrollbar, but I fixed it in Grsmto/simplebar#312 and Grsmto/simplebar#317. Just waiting for the fixes to be included in a new upstream release.
2019-05-21T02:55:43
zulip/zulip
12,483
zulip__zulip-12483
[ "8379" ]
0f3c2748dd8bb829ada039078bb40760f6fcbae6
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1,7 +1,7 @@ # Zulip's main markdown implementation. See docs/subsystems/markdown.md for # detailed documentation on our markdown syntax. from typing import (Any, Callable, Dict, Iterable, List, NamedTuple, - Optional, Set, Tuple, TypeVar, Union, cast) + Optional, Set, Tuple, TypeVar, Union) from mypy_extensions import TypedDict from typing.re import Match, Pattern @@ -284,7 +284,8 @@ def walk_tree(root: Element, ElementFamily = NamedTuple('ElementFamily', [ ('grandparent', Optional[Element]), ('parent', Element), - ('child', Element) + ('child', Element), + ('in_blockquote', bool), ]) ResultWithFamily = NamedTuple('ResultWithFamily', [ @@ -293,7 +294,7 @@ def walk_tree(root: Element, ]) ElementPair = NamedTuple('ElementPair', [ - ('parent', Optional[Element]), + ('parent', Optional[Any]), # Recursive types are not fully supported yet ('value', Element) ]) @@ -307,18 +308,19 @@ def walk_tree_with_family(root: Element, currElementPair = queue.popleft() for child in currElementPair.value.getchildren(): if child.getchildren(): - queue.append(ElementPair(parent=currElementPair, value=child)) # type: ignore # Lack of Deque support in typing module for Python 3.4.3 + queue.append(ElementPair(parent=currElementPair, value=child)) # type: ignore # Lack of Deque support in typing module for Python <=3.5.3 result = processor(child) if result is not None: if currElementPair.parent is not None: - grandparent_element = cast(ElementPair, currElementPair.parent) + grandparent_element = currElementPair.parent grandparent = grandparent_element.value else: grandparent = None family = ElementFamily( grandparent=grandparent, parent=currElementPair.value, - child=child + child=child, + in_blockquote=has_blockquote_ancestor(currElementPair) ) results.append(ResultWithFamily( @@ -328,6 +330,14 @@ def walk_tree_with_family(root: Element, return results +def has_blockquote_ancestor(element_pair: Optional[ElementPair]) -> bool: + if element_pair is None: + return False + elif element_pair.value.tag == 'blockquote': + return True + else: + return has_blockquote_ancestor(element_pair.parent) + # height is not actually used def add_a( root: Element, @@ -1024,7 +1034,8 @@ def is_absolute_url(self, url: str) -> bool: def run(self, root: Element) -> None: # Get all URLs from the blob found_urls = walk_tree_with_family(root, self.get_url_data) - unique_urls = {found_url.result[0] for found_url in found_urls} + # Collect unique URLs which are not quoted + unique_urls = {found_url.result[0] for found_url in found_urls if not found_url.family.in_blockquote} if len(found_urls) == 0 or len(unique_urls) > self.INLINE_PREVIEW_LIMIT_PER_MESSAGE: return @@ -1034,7 +1045,7 @@ def run(self, root: Element) -> None: for found_url in found_urls: (url, text) = found_url.result - if url not in processed_urls: + if url in unique_urls and url not in processed_urls: processed_urls.add(url) else: continue
diff --git a/zerver/tests/fixtures/markdown_test_cases.json b/zerver/tests/fixtures/markdown_test_cases.json --- a/zerver/tests/fixtures/markdown_test_cases.json +++ b/zerver/tests/fixtures/markdown_test_cases.json @@ -305,7 +305,7 @@ { "name": "blockquote_inline_image", "input": ">Google logo today:\n>https://www.google.com/images/srpr/logo4w.png\n>Kinda boring", - "expected_output": "<blockquote>\n<p>Google logo today:<br>\n<a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\">https://www.google.com/images/srpr/logo4w.png</a><br>\nKinda boring</p>\n<div class=\"message_inline_image\"><a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\"><img data-src-fullsize=\"/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full\" src=\"/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail\"></a></div></blockquote>", + "expected_output": "<blockquote>\n<p>Google logo today:<br>\n<a href=\"https://www.google.com/images/srpr/logo4w.png\" target=\"_blank\" title=\"https://www.google.com/images/srpr/logo4w.png\">https://www.google.com/images/srpr/logo4w.png</a><br>\nKinda boring</p>\n</blockquote>", "backend_only_rendering": true, "text_content": "> Google logo today:\n> https:\/\/www.google.com\/images\/srpr\/logo4w.png\n> Kinda boring\n" }, diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -444,6 +444,29 @@ def test_inline_image_preview(self): converted = render_markdown(msg, content) self.assertEqual(converted, without_preview) + @override_settings(INLINE_IMAGE_PREVIEW=True) + def test_inline_image_quoted_blocks(self) -> None: + content = 'http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg' + expected = '<div class="message_inline_image"><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg" target="_blank" title="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=thumbnail"></a></div>' + sender_user_profile = self.example_user('othello') + msg = Message(sender=sender_user_profile, sending_client=get_client("test")) + converted = render_markdown(msg, content) + self.assertEqual(converted, expected) + + content = '>http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg\n\nAwesome!' + expected = '<blockquote>\n<p><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg" target="_blank" title="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></p>\n</blockquote>\n<p>Awesome!</p>' + sender_user_profile = self.example_user('othello') + msg = Message(sender=sender_user_profile, sending_client=get_client("test")) + converted = render_markdown(msg, content) + self.assertEqual(converted, expected) + + content = '>- http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg\n\nAwesome!' 
+ expected = '<blockquote>\n<ul>\n<li><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg" target="_blank" title="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></li>\n</ul>\n</blockquote>\n<p>Awesome!</p>' + sender_user_profile = self.example_user('othello') + msg = Message(sender=sender_user_profile, sending_client=get_client("test")) + converted = render_markdown(msg, content) + self.assertEqual(converted, expected) + @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_preview_order(self) -> None: content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\nhttp://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\nhttp://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg' @@ -455,7 +478,7 @@ def test_inline_image_preview_order(self) -> None: self.assertEqual(converted, expected) content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\n\n>http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\n\n* http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg\n* https://www.google.com/images/srpr/logo4w.png' - expected = '<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=thumbnail"></a></div><blockquote>\n<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_02.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_02.jpg&amp;size=thumbnail"></a></div></blockquote>\n<ul>\n<li><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=thumbnail"></a></div></li>\n<li><div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png" target="_blank" title="https://www.google.com/images/srpr/logo4w.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail"></a></div></li>\n</ul>' + expected = '<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=thumbnail"></a></div><blockquote>\n<p><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg" target="_blank" 
title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg</a></p>\n</blockquote>\n<ul>\n<li><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg" target="_blank" title="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=thumbnail"></a></div></li>\n<li><div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png" target="_blank" title="https://www.google.com/images/srpr/logo4w.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail"></a></div></li>\n</ul>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test"))
Bugdown: Duplicate links in embeds When you send multiple duplicate links in a single chat message there are two embeds created for the exact same content. Example: (https://www.youtube.com/watch?v=4bfECHhoBBQ https://www.youtube.com/watch?v=4bfECHhoBBQ) would create two embeds for the exact same video
@zulipbot claim

Hello @aayushagra, it looks like you've currently claimed 1 issue in this repository. We encourage new contributors to focus their efforts on at most 1 issue at a time, so please complete your work on your other claimed issues before trying to claim this issue again. We look forward to your valuable contributions!

Already working on a fix for this. Will claim and create a PR once my previous issue is fixed.

Hello @zulip/server-markdown members, this issue was labeled with the **area: markdown** label, so you may want to check it out!

@aayushagra are you still working on this? If not, I would like to take this up.

@shreyanshdwivedi The issue already has a pending PR.
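For context on how the fix above avoids previews for quoted links: the patch records, while walking the rendered message tree, whether each URL has a `<blockquote>` ancestor. A rough standalone illustration of that ancestor check (not the actual implementation, which operates on Python-Markdown's ElementTree during rendering and also deduplicates repeated URLs) is:

```python
import xml.etree.ElementTree as ET

def urls_outside_blockquotes(html):
    # Collect hrefs of links that do not have a <blockquote> ancestor.
    root = ET.fromstring(html)
    parent_of = {child: parent for parent in root.iter() for child in parent}
    urls = []
    for link in root.iter('a'):
        node, quoted = link, False
        while node in parent_of:
            node = parent_of[node]
            if node.tag == 'blockquote':
                quoted = True
                break
        if not quoted:
            urls.append(link.get('href'))
    return urls

html = ('<div><blockquote><a href="http://example.com/quoted.png">x</a></blockquote>'
        '<p><a href="http://example.com/new.png">y</a></p></div>')
print(urls_outside_blockquotes(html))  # ['http://example.com/new.png']
```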
2019-06-04T14:16:21
zulip/zulip
12,755
zulip__zulip-12755
[ "12580" ]
fe59c31e85a6b4ebdd3b9068d3c7fd0593ba11bd
diff --git a/zerver/views/documentation.py b/zerver/views/documentation.py --- a/zerver/views/documentation.py +++ b/zerver/views/documentation.py @@ -162,6 +162,10 @@ def add_integrations_open_graph_context(context: Dict[str, Any], request: HttpRe context['OPEN_GRAPH_TITLE'] = 'Connect your {category} tools to Zulip'.format(category=category) context['OPEN_GRAPH_DESCRIPTION'] = description + elif path_name == 'integrations': + context['OPEN_GRAPH_TITLE'] = 'Connect the tools you use to Zulip' + context['OPEN_GRAPH_DESCRIPTION'] = description + class IntegrationView(ApiURLView): template_name = 'zerver/integrations/index.html'
diff --git a/zerver/tests/test_docs.py b/zerver/tests/test_docs.py --- a/zerver/tests/test_docs.py +++ b/zerver/tests/test_docs.py @@ -155,6 +155,21 @@ def test_doc_endpoints(self) -> None: result = self.client_get('/static/favicon.ico') self.assertEqual(result.status_code, 200) + def test_portico_pages_open_graph_metadata(self) -> None: + # Why Zulip + url = '/why-zulip/' + title = '<meta property="og:title" content="Team chat with first-class threading">' + description = '<meta property="og:description" content="Most team chats are overwhelming' + self._test(url, title, doc_html_str=True) + self._test(url, description, doc_html_str=True) + + # Features + url = '/features/' + title = '<meta property="og:title" content="Zulip Features">' + description = '<meta property="og:description" content="First class threading' + self._test(url, title, doc_html_str=True) + self._test(url, description, doc_html_str=True) + @slow("Tests dozens of endpoints, including all our integrations docs") def test_integration_doc_endpoints(self) -> None: self._test('/integrations/', @@ -184,6 +199,13 @@ def test_integration_pages_open_graph_metadata(self) -> None: self._test(url, title, doc_html_str=True) self._test(url, description, doc_html_str=True) + # Test integrations page + url = '/integrations/' + title = '<meta property="og:title" content="Connect the tools you use to Zulip">' + description = '<meta property="og:description" content="Zulip comes with over' + self._test(url, title, doc_html_str=True) + self._test(url, description, doc_html_str=True) + def test_email_integration(self) -> None: self._test('/integrations/doc-html/email', 'support+abcdefg@testserver', doc_html_str=True)
portico/docs: Improve Open Graph descriptions and titles.

Follow-ups to https://github.com/zulip/zulip/pull/11045.

- [ ] Currently we only use the first paragraph of text for the description. It would be better to concatenate the first few paragraphs and truncate. Basically it's just using bs.find_all('p') and joining things together in a loop until it gets too long. One catch is that we probably want to stop at the first non-paragraph block so we don't cross a list or something.
- [ ] Currently, we've just tested our Open Graph previews with /api and /help. We should extend them to the /integrations pages, and also figure out what we want to do for the top-10 major portico pages (especially /features, /why-zulip, and /for/open-source), which could benefit from a similar treatment.

To work on this I would start by skimming the code from #11045.
Hello @zulip/server-misc members, this issue was labeled with the "area: misc" label, so you may want to check it out! <!-- areaLabelAddition --> @punchagan FYI in case you're interested in working on this. Yeah, I will work on this. @zulipbot claim The first item about using only the first paragraph was fixed in https://github.com/zulip/zulip/commit/e1f02dc6f25c58975e54d8888db6eb28e93edf7c with further improvements in other future commits. (edit: Changed the link to point to the correct commit) The integrations page is populated using Ajax calls, so re-using the code that populates the meta tags for the /help pages may not work very well. I've a draft PR in #12586 that allows for adding some simple handwritten open graph tags, with just the integration name, etc being dynamic based on the URL. For the portico pages like /features, we just render the template as a view, and again it seems quite tricky to re-use the code that populates the meta tags for /help pages. Would it be acceptable to allow adding these meta tags directly to the template, especially since there are only a handful of these page? I think for /integrations, just having a brief summary that it's the integration for X is probably the main thing that's useful; the actual instructions are unlikely to be that helpful without going to the page anyway? Yeah, that's what I have in mind. :+1: Didn't mean to close it! Sorry! (The UI for GitHub's Projects is slightly confusing. I've been using it to track issues that I'm working on, and I just wanted to close the expanded view of this issue in it, but ended up closing the issue! The `x` on top goes away when I scroll down on the issue's details, and I clicked the close issue button :see_no_evil: ![image](https://user-images.githubusercontent.com/315678/59577787-ae224180-90e2-11e9-8a14-b86436979a83.png)) Hello @punchagan, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot Still working on it
2019-07-10T19:45:41
zulip/zulip
13,013
zulip__zulip-13013
[ "11395" ]
c6b3d0212d06a671f61333bf353cbc5094492e1d
diff --git a/docs/conf.py b/docs/conf.py --- a/docs/conf.py +++ b/docs/conf.py @@ -308,8 +308,18 @@ # You can specify multiple suffix as a list of string: source_suffix = ['.rst', '.md'] +# Temporary workaround to supress warnings after upgrading to recommonmark==0.5.0 +# Otherwise, sphinx build complains about all the links ending in .html +# See PR # for more details +def on_missing_reference(app, env, node, contnode): + if node['reftype'] == 'any': + return contnode + else: + return None + def setup(app: Any) -> None: + app.connect('missing-reference', on_missing_reference) app.add_config_value('recommonmark_config', { 'enable_eval_rst': True, # Turn off recommonmark features we aren't using.
Upgrade recommonmark pip package to latest version

update-locked-requirements fails due to compatibility issues. This is probably a dead end since we can't upgrade CommonMark.

```
virtualenv-clone==0.4.0
wcwidth==0.1.7            # via prompt-toolkit
yamole==2.1.6
Could not find a tag or branch '60ed2431c07686a12f2770b2d852c5650f3ccfc6', assuming commit.
Requested libthumbor==1.3.2zulip from git+https://github.com/zulip/libthumbor.git@60ed2431c07686a12f2770b2d852c5650f3ccfc6#egg=libthumbor==1.3.2zulip (from -r requirements/common.in (line 208)), but installing version 1.3.2
Could not find a tag or branch '7d8bdc4dbcfcc5a73298747293b99fe53da55315', assuming commit.
Requested talon==1.2.10.zulip1 from git+https://github.com/zulip/talon.git@7d8bdc4dbcfcc5a73298747293b99fe53da55315#egg=talon==1.2.10.zulip1 (from -r requirements/common.in (line 76)), but installing version 1.2.11
Requested zulip_bots==0.5.8+git from git+https://github.com/zulip/[email protected]#egg=zulip_bots==0.5.8+git&subdirectory=zulip_bots (from -r requirements/common.in (line 179)), but installing version 0.5.8
Could not find a tag or branch '70ac02bec', assuming commit.
Requested ujson==1.35+git from git+https://github.com/zulip/ultrajson@70ac02bec#egg=ujson==1.35+git (from -r requirements/common.in (line 146)), but installing version 1.35
Requested zulip==0.5.8_git from git+https://github.com/zulip/[email protected]#egg=zulip==0.5.8_git&subdirectory=zulip (from -r requirements/common.in (line 178)), but installing version 0.5.8
Could not find a tag or branch '0d2b15cdb5af5ddec88d41cac19c0f2ce1b1ad38', assuming commit.
Requested django-bitfield==1.9.3+dev.0d2b15cdb5af5ddec88d41cac19c0f2ce1b1ad38 from git+https://github.com/zulip/django-bitfield@0d2b15cdb5af5ddec88d41cac19c0f2ce1b1ad38#egg=django-bitfield==1.9.3+dev.0d2b15cdb5af5ddec88d41cac19c0f2ce1b1ad38 (from -r requirements/common.in (line 58)), but installing version 1.9.3
Could not find a version that matches CommonMark==0.5.4,>=0.7.3
Tried: 0.5.0, 0.5.1, 0.5.2, 0.5.3, 0.5.4, 0.5.5, 0.5.5, 0.6.0, 0.6.0, 0.6.1, 0.6.1, 0.6.2, 0.6.2, 0.6.3, 0.6.3, 0.6.4, 0.6.4, 0.7.0, 0.7.0, 0.7.1, 0.7.1, 0.7.2, 0.7.2, 0.7.3, 0.7.3, 0.7.4, 0.7.4, 0.7.5, 0.7.5, 0.8.0, 0.8.0, 0.8.1, 0.8.1
```

@zulipbot label "area: dependencies"
Hello @zulip/server-dependencies members, this issue was labeled with the "area: dependencies" label, so you may want to check it out! <!-- areaLabelAddition --> I tried upgrading this since the dependecy issue with CommonMark got fixed. I am getting the following error. https://circleci.com/gh/hackerkid/zulip/2978?utm_campaign=vcs-integration-link&utm_medium=referral&utm_source=github-build-link ``` Mar 06 16:09:33 The HTML pages are in _build/html. Testing only internal links in documentation... 2019-03-06 16:09:39 [scrapy.core.scraper] ERROR: Spider error processing <HEAD https://github.com/zulip/python-zulip-api/blob/master/zulip_bots/README> (referer: None) Traceback (most recent call last): File "/srv/zulip-venv-cache/fd223bc2863de13953a50ef5ef5d691eaca4ebe8/zulip-py3-venv/lib/python3.6/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks current.result = callback(current.result, *args, **kw) File "/home/circleci/zulip/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py", line 116, in error_callback raise Exception('Page not found: {}'.format(response)) Exception: Page not found: <404 https://github.com/zulip/python-zulip-api/blob/master/zulip_bots/README> 2019-03-06 16:09:40 [scrapy.core.scraper] ERROR: Spider error processing <HEAD https://github.com/zulip/zulip/blob/master/requirements/README> (referer: None) Traceback (most recent call last): File "/srv/zulip-venv-cache/fd223bc2863de13953a50ef5ef5d691eaca4ebe8/zulip-py3-venv/lib/python3.6/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks current.result = callback(current.result, *args, **kw) File "/home/circleci/zulip/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py", line 116, in error_callback raise Exception('Page not found: {}'.format(response)) Exception: Page not found: <404 https://github.com/zulip/zulip/blob/master/requirements/README> 2019-03-06 16:09:50 [scrapy.core.scraper] ERROR: Spider error processing <HEAD https://github.com/zulip/zulipbot/blob/master/.github/CONTRIBUTING> (referer: None) Traceback (most recent call last): File "/srv/zulip-venv-cache/fd223bc2863de13953a50ef5ef5d691eaca4ebe8/zulip-py3-venv/lib/python3.6/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks current.result = callback(current.result, *args, **kw) File "/home/circleci/zulip/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py", line 116, in error_callback raise Exception('Page not found: {}'.format(response)) Exception: Page not found: <404 https://github.com/zulip/zulipbot/blob/master/.github/CONTRIBUTING> Failed! Exited with code 1 ``` It looks like the updated version of RecommonMark is rewriting GitHub link URLs to an invalid version. @drrosa this may be a reasonable project for you to pick up. https://github.com/zulip/zulip/pull/11719 has some potentially relevant context (it seems like possibly the ReadTheDocs toolchain of CommonMark/RecommonMark are somehow trying to solve the same problem we were trying to solve in #11719?) Needs some investigation. Sounds good! I'll look into it. Tonight I got a chance to investigate this issue. Currently, `../tools/build-docs` results in lots of warnings e.g. `/zulip/docs/tutorials/writing-views.md:367: WARNING: None:any reference target not found: ../subsystems/client.html` but the docs still build correctly and the links with warnings work just fine. I then changed that markdown link from `client.html` to `client.md` and that made the warning go away. 
The link also gets converted correctly to `client.html` after building the docs. So as @timabbott suspected recommonmark==0.5.0 now allows `.md` links between doc pages! The docs also build correctly with the [latest CommonMark](https://pypi.org/project/commonmark/#history)==0.9.0 version. ``../tools/build-docs`` with the [latest recommonmark](https://pypi.org/project/recommonmark/#history)==0.6.0 gives the following error: ``` Running Sphinx v1.8.4 making output directory... building [mo]: targets for 0 po files that are out of date building [html]: targets for 135 source files that are out of date updating environment: 135 added, 0 changed, 0 removed /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) /Volumes/Storage/goinfre/drosa-ta/zulip-dev/venv-py3-zulip-rtd/lib/python3.6/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document warn("Container node skipped: type={0}".format(mdnode.t)) Sphinx parallel build error: TypeError: sequence item 1: expected str instance, NoneType found ``` OK; want to do a prep PR that just upgrades to 0.5.0 without breaking anything?
2019-08-16T02:03:48
zulip/zulip
13,067
zulip__zulip-13067
[ "12374", "12374" ]
e6340c6e54a572d58bc1039430a8f3a461635f00
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -26,4 +26,4 @@ # historical commits sharing the same major version, in which case a # minor version bump suffices. -PROVISION_VERSION = '49.1' +PROVISION_VERSION = '49.2'
Clean up `update-locked-requirements` and `requirements.in` files to remove `-e` hackery.

It looks like https://github.com/jazzband/pip-tools/pull/807 was included in the latest `pip-tools` release 12 days ago. I think this may mean we can get rid of our semantically incorrect usage of `-e` in our requirements files, which in turn may mean we can remove most of the messy code in `tools/update-locked-requirements` related to hackily removing the `-e` lines. See `compile_requirements` in that file for details. My guess is that this means if we upgrade pip-tools, we can delete 50% of the code in `update-locked-requirements` and clean up our `requirements.in` files to not use `-e`. @hackerkid this might be a good project for you.
Hello @zulip/server-tooling members, this issue was labeled with the "area: tooling" label, so you may want to check it out!

@zulipbot claim
2019-08-25T00:23:56
zulip/zulip
13,075
zulip__zulip-13075
[ "12800" ]
b062e8332f87638e714e35ad045c5cba04075536
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1814,6 +1814,51 @@ def get_sub_registry(r: markdown.util.Registry, keys: List[str]) -> markdown.uti DEFAULT_BUGDOWN_KEY = -1 ZEPHYR_MIRROR_BUGDOWN_KEY = -2 +class NormalizeWhitespace(markdown.preprocessors.NormalizeWhitespace): + """ Upstream NormalizeWhitespace strays away from its stated goal + by also stripping the characters that the HtmlStash uses to mark the + start/end of the stashed text. This means that we cannot run the upstream + preprocessor after we have already stashed some Html blocks. + + Our NormalizeWhitespace is exactly the same as upstream except for one line + marked below. """ + + def run(self, lines: List[str]) -> List[str]: + source = '\n'.join(lines) + # source = source.replace(util.STX, "").replace(util.ETX, "") # Zulip change: we comment this line. + source = source.replace("\r\n", "\n").replace("\r", "\n") + "\n\n" + source = source.expandtabs(self.md.tab_length) + source = re.sub(r'(?<=\n) +\n', '\n', source) + return source.split('\n') + +class BlockParser(markdown.blockprocessors.BlockParser): + """ Upstream doesn't support running preprocessors when parsing blocks, which + is needed for issues like https://github.com/zulip/zulip/issues/12800. This + subclass retains the default upstream behavior for all blocks except blockquotes. + """ + def __init__(self, md: markdown.Markdown) -> None: + super().__init__(md) + self.custom_normalize_whitespace = NormalizeWhitespace(self.md) + + def run_preprocessors(self, block: str) -> str: + # Due to the restriction described in NormalizeWhitespace, we need to + # run our subclassed NormalizeWhitespace preprocessor instead of the + # upstream version when we run the preprocessors inside a block. + lines = block.split("\n") + whitespace = self.md.preprocessors.__getitem__('normalize_whitespace') + for prep in self.md.preprocessors: + if prep == whitespace: + lines = self.custom_normalize_whitespace.run(lines) + else: + lines = prep.run(lines) + return '\n'.join(lines) + + def parseChunk(self, parent: Element, text: str) -> None: + if parent.tag == 'blockquote': + # Rerun preprocessors, mainly to detect fenced_code inside quotes. + text = self.run_preprocessors(text) + super().parseChunk(parent, text) + class Bugdown(markdown.Markdown): def __init__(self, *args: Any, **kwargs: Union[bool, int, List[Any]]) -> None: # define default configs @@ -1864,7 +1909,7 @@ def build_block_parser(self) -> markdown.util.Registry: # olist - replaced by ours # ulist - replaced by ours # quote - replaced by ours - parser = markdown.blockprocessors.BlockParser(self) + parser = BlockParser(self) parser.blockprocessors.register(markdown.blockprocessors.EmptyBlockProcessor(parser), 'empty', 95) parser.blockprocessors.register(ListIndentProcessor(parser), 'indent', 90) if not self.getConfig('code_block_processor_disabled'): diff --git a/zerver/lib/bugdown/fenced_code.py b/zerver/lib/bugdown/fenced_code.py --- a/zerver/lib/bugdown/fenced_code.py +++ b/zerver/lib/bugdown/fenced_code.py @@ -293,7 +293,6 @@ def run(self, lines: Iterable[str]) -> List[str]: handler = OuterHandler(processor, output, self.run_content_validators) self.push(handler) - for line in lines: self.handlers[-1].handle_line(line)
diff --git a/zerver/tests/fixtures/markdown_test_cases.json b/zerver/tests/fixtures/markdown_test_cases.json --- a/zerver/tests/fixtures/markdown_test_cases.json +++ b/zerver/tests/fixtures/markdown_test_cases.json @@ -94,6 +94,12 @@ "input": ">\n>\ntext", "expected_output": "<blockquote>\n<p>text</p>\n</blockquote>" }, + { + "name": "blockquote_with_fenced_code", + "input": "> ```\n> multilne\n> codeblock\n> ```", + "expected_output": "<blockquote>\n<div class=\"codehilite\"><pre><span></span>multilne\ncodeblock\n</pre></div>\n\n\n</blockquote>", + "marked_expected_output": "<blockquote>\n<div class=\"codehilite\"><pre>multilne\ncodeblock\n</pre></div>\n\n\n</blockquote>" + }, { "name": "fenced_quote_with_hashtag", "input": "```quote\n# line 1\n#line 2\n```",
Block quotes (>) and fenced code blocks don't interact nicely When you try to block-quote a code block, it.. really doesn't work right. The best way of explaining this is with images: (Apologies for the wide screenshots) ![](https://hydraz.semi.works/img/91a18.png) Source for the message: ![](https://hydraz.semi.works/img/4fe77.png) I expected something more like Pandoc's rendering, where the whole code block is block-quoted: ![](https://hydraz.semi.works/img/95884.png) ```html <blockquote> <div class="sourceCode" id="cb1"> <pre class="sourceCode haskell"> <code class="sourceCode haskell"> <span id="cb1-1"> <a href="#cb1-1"></a> <span class="ot">foo ::</span> <span class="dt">Int</span> <span class="ot">-&gt;</span> <span class="dt">Int</span> </span> <span id="cb1-2"> <a href="#cb1-2"></a>foo x <span class="ot">=</span> x <span class="op">+</span> <span class="dv">5</span> </span> </code> </pre> </div> </blockquote> ```
@aero31aero this is a duplicate, right? I don't think so. Are you thinking of #12446? Ahh, I think so. I'm not sure the syntax proposed here makes sense; we might want to use the syntax in #12446 for this purpose. Hello @zulip/server-markdown, @zulip/server-message-view members, this issue was labeled with the "area: markdown", "area: message view" labels, so you may want to check it out! <!-- areaLabelAddition --> ~~I can't see the images here anymore, but I think end-quote markers in #13001 would fix this.~~ Edit: no, this is a separate issue, but the commonmark implementation doesn't agree with the proposed syntax here: ``` <blockquote> <pre> <code class="language-js"></code> </pre> </blockquote> <p> let y = 1; console.log(y); </p> <pre> <code></code> </pre> ``` for ~~~ > ```js let y = 1; console.log(y); ``` ~~~ It’s not a bug that we don’t support the incorrect syntax where you only quote the first line of the code block, but it is a bug that we don’t support the CommonMark syntax where you quote every line of the code block: ```md > ```haskell > foo :: Int -> Int > foo x = x + 5 > ``` ```
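As a quick way to experiment with the CommonMark-style syntax above outside Zulip's bugdown pipeline, one can feed it to plain Python-Markdown and inspect the raw HTML. This is illustrative only; the output will not match what Zulip renders with the BlockParser change in the patch above.

```python
# The input quotes every line of the fenced block, per the CommonMark form above.
import markdown

text = "> ```haskell\n> foo :: Int -> Int\n> foo x = x + 5\n> ```"
print(markdown.markdown(text, extensions=["fenced_code"]))
```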
2019-08-26T11:44:19
zulip/zulip
13,077
zulip__zulip-13077
[ "13068" ]
f1b91e577e5356ac9beb1ceaad85881c2f5af431
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -26,4 +26,4 @@ # historical commits sharing the same major version, in which case a # minor version bump suffices. -PROVISION_VERSION = '49.2' +PROVISION_VERSION = '49.3'
Upgrade pip from 19.1.1 and pip-tools from 3.8.0 Followup issue from #13067. pip-tools 3.9.0 or 4.0.0 fails to resolve dependencies from Git URLs (jazzband/pip-tools#851): `pip._internal.exceptions.DistributionNotFound: No matching distribution found for zulip==0.6.1_git (from -r requirements/common.in (line 135))` while pip 19.2 breaks pip-tools 3.8.0 (jazzband/pip-tools#853): `TypeError: __init__() got an unexpected keyword argument 'find_links'`
Hello @zulip/server-dependencies members, this issue was labeled with the "area: dependencies" label, so you may want to check it out! <!-- areaLabelAddition --> I opened a possible fix as jazzband/pip-tools#879; merged for the next pip-tools release (presumably 4.1.0).
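As a hedged aside, until those upstream fixes ship, one way to catch accidental drift away from the working combination described above is to assert the pins at provision time. The version numbers are taken from the issue title; everything else here is illustrative.

```python
# Raises pkg_resources.VersionConflict (or DistributionNotFound) if the installed
# pip / pip-tools no longer match the versions that are known to work together.
import pkg_resources

pkg_resources.require("pip==19.1.1", "pip-tools==3.8.0")
```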
2019-08-26T20:03:21
zulip/zulip
13,131
zulip__zulip-13131
[ "13130" ]
df134be23501cb24f5abd548ddcdd5bc0db7d074
diff --git a/zproject/backends.py b/zproject/backends.py --- a/zproject/backends.py +++ b/zproject/backends.py @@ -17,12 +17,13 @@ import magic from typing import Any, Dict, List, Optional, Set, Tuple, Union -from django_auth_ldap.backend import LDAPBackend, _LDAPUser +from django_auth_ldap.backend import LDAPBackend, _LDAPUser, ldap_error from django.contrib.auth import get_backends from django.contrib.auth.backends import RemoteUserBackend from django.conf import settings from django.core.exceptions import ValidationError from django.core.validators import validate_email +from django.dispatch import receiver, Signal from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.urls import reverse @@ -566,6 +567,24 @@ def authenticate(self, *, username: str, password: str, realm: Realm, return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]: return None +class PopulateUserLDAPError(ZulipLDAPException): + pass + +@receiver(ldap_error, sender=ZulipLDAPUserPopulator) +def catch_ldap_error(signal: Signal, **kwargs: Any) -> None: + """ + Inside django_auth_ldap populate_user(), if LDAPError is raised, + e.g. due to invalid connection credentials, the function catches it + and emits a signal (ldap_error) to communicate this error to others. + We normally don't use signals, but here there's no choice, so in this function + we essentially convert the signal to a normal exception that will properly + propagate out of django_auth_ldap internals. + """ + if kwargs['context'] == 'populate_user': + # The exception message can contain the password (if it was invalid), + # so it seems better not to log that, and only use the original exception's name here. + raise PopulateUserLDAPError(kwargs['exception'].__class__.__name__) + def sync_user_from_ldap(user_profile: UserProfile) -> bool: backend = ZulipLDAPUserPopulator() updated_user = backend.populate_user(backend.django_to_ldap_username(user_profile.email))
diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -55,7 +55,8 @@ dev_auth_enabled, password_auth_enabled, github_auth_enabled, google_auth_enabled, \ require_email_format_usernames, AUTH_BACKEND_NAME_MAP, \ ZulipLDAPConfigurationError, ZulipLDAPExceptionOutsideDomain, \ - ZulipLDAPException, query_ldap, sync_user_from_ldap, SocialAuthMixin + ZulipLDAPException, query_ldap, sync_user_from_ldap, SocialAuthMixin, \ + PopulateUserLDAPError from zerver.views.auth import (maybe_send_to_registration, _subdomain_token_salt) @@ -2601,6 +2602,22 @@ def perform_ldap_sync(self, user_profile: UserProfile) -> None: result = sync_user_from_ldap(user_profile) self.assertTrue(result) + @mock.patch("zproject.backends.do_deactivate_user") + def test_ldap_auth_error_doesnt_deactivate_user(self, mock_deactivate: mock.MagicMock) -> None: + """ + This is a test for a bug where failure to connect to LDAP in sync_user_from_ldap + (e.g. due to invalid credentials) would cause the user to be deactivated if + LDAP_DEACTIVATE_NON_MATCHING_USERS was True. + Details: https://github.com/zulip/zulip/issues/13130 + """ + with self.settings( + LDAP_DEACTIVATE_NON_MATCHING_USERS=True, + LDAP_APPEND_DOMAIN='zulip.com', + AUTH_LDAP_BIND_PASSWORD='wrongpass'): + with self.assertRaises(PopulateUserLDAPError): + sync_user_from_ldap(self.example_user('hamlet')) + mock_deactivate.assert_not_called() + def test_update_full_name(self) -> None: self.mock_ldap.directory = { 'uid=hamlet,ou=users,dc=zulip,dc=com': {
sync_ldap_user_data can deactivate all users if LDAP connection fails Reported here: https://chat.zulip.org/#narrow/stream/31-production-help/topic/sync_ldap_user_data.20in.202.2E0.2E4/near/784741 If ``ZulipLDAPAuthBackend`` is the only backend enabled, ``sync_ldap_user_data`` will deactivate all users if it can't connect to LDAP. This is because ``LDAP_DEACTIVATE_NON_MATCHING_USERS`` defaults to ``True`` if LDAP is the only backend enabled - and then we have this piece of code in ``sync_user_from_ldap``: ``` updated_user = backend.populate_user(backend.django_to_ldap_username(user_profile.email)) if not updated_user: if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS: do_deactivate_user(user_profile) ``` ``backend.populate_user`` will return None if the connection fails, causing user deactivation - because of the way the function is written in ``django_auth_ldap`` module. The only way the module gives us to "catch" such failures is by catching a signal it sends: ``` def populate_user(self): """ Populates the Django user object using the default bind credentials. """ user = None try: # self.attrs will only be non-None if we were able to load this user # from the LDAP directory, so this filters out nonexistent users. if self.attrs is not None: self._get_or_create_user(force_populate=True) user = self._user except ldap.LDAPError as e: results = ldap_error.send( self.backend.__class__, context="populate_user", user=self._user, exception=e, ) if len(results) == 0: logger.warning( "Caught LDAPError while authenticating {}: {}".format( self._username, pprint.pformat(e) ) ) except Exception as e: logger.warning("{} while authenticating {}".format(e, self._username)) raise return user ```
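For readers unfamiliar with django-auth-ldap's escape hatch here, the general shape of a signal receiver that turns the swallowed `LDAPError` back into an exception looks like the sketch below. It mirrors the quoted code and the fix in the patch above, but it is a standalone illustration rather than Zulip's implementation.

```python
from typing import Any

from django.dispatch import receiver
from django_auth_ldap.backend import LDAPBackend, ldap_error

class LDAPConnectionError(Exception):
    pass

@receiver(ldap_error, sender=LDAPBackend)
def raise_on_populate_error(signal: Any, **kwargs: Any) -> None:
    # populate_user() returns None both for "no such user" and for connection
    # failures; re-raising here lets callers tell the two apart and skip the
    # LDAP_DEACTIVATE_NON_MATCHING_USERS path on connection errors.
    if kwargs.get("context") == "populate_user":
        raise LDAPConnectionError(type(kwargs["exception"]).__name__)
```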
2019-09-04T08:29:22
zulip/zulip
13,136
zulip__zulip-13136
[ "13134" ]
30440cf466252dcfef691fa508aff4bac8ff1bed
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -2696,7 +2696,7 @@ def notify_subscriptions_added(user_profile: UserProfile, is_web_public=stream.is_web_public, is_announcement_only=stream.is_announcement_only, color=subscription.color, - email_address=encode_email_address(stream), + email_address=encode_email_address(stream, show_sender=True), desktop_notifications=subscription.desktop_notifications, audible_notifications=subscription.audible_notifications, push_notifications=subscription.push_notifications, @@ -3557,7 +3557,7 @@ def do_rename_stream(stream: Stream, # date field in all cases. cache_delete_many( to_dict_cache_key_id(message.id) for message in messages) - new_email = encode_email_address(stream) + new_email = encode_email_address(stream, show_sender=True) # We will tell our users to essentially # update stream.name = new_name where name = old_name @@ -4679,6 +4679,8 @@ def gather_subscriptions_helper(user_profile: UserProfile, if not sub["active"] and user_profile.is_guest: subscribers = None + email_address = encode_email_address_helper(stream["name"], stream["email_token"], + show_sender=True) stream_dict = {'name': stream["name"], 'in_home_view': not sub["is_muted"], 'is_muted': sub["is_muted"], @@ -4699,7 +4701,7 @@ def gather_subscriptions_helper(user_profile: UserProfile, 'stream_weekly_traffic': get_average_weekly_stream_traffic(stream["id"], stream["date_created"], recent_traffic), - 'email_address': encode_email_address_helper(stream["name"], stream["email_token"]), + 'email_address': email_address, 'history_public_to_subscribers': stream['history_public_to_subscribers']} if subscribers is not None: diff --git a/zerver/lib/email_mirror_helpers.py b/zerver/lib/email_mirror_helpers.py --- a/zerver/lib/email_mirror_helpers.py +++ b/zerver/lib/email_mirror_helpers.py @@ -26,10 +26,10 @@ def get_email_gateway_message_string_from_address(address: str) -> str: return msg_string -def encode_email_address(stream: Stream) -> str: - return encode_email_address_helper(stream.name, stream.email_token) +def encode_email_address(stream: Stream, show_sender: bool=False) -> str: + return encode_email_address_helper(stream.name, stream.email_token, show_sender) -def encode_email_address_helper(name: str, email_token: str) -> str: +def encode_email_address_helper(name: str, email_token: str, show_sender: bool=False) -> str: # Some deployments may not use the email gateway if settings.EMAIL_GATEWAY_PATTERN == '': return '' @@ -52,6 +52,9 @@ def encode_email_address_helper(name: str, email_token: str) -> str: else: encoded_token = email_token + if show_sender: + encoded_token += ".show-sender" + return settings.EMAIL_GATEWAY_PATTERN % (encoded_token,) def decode_email_address(email: str) -> Tuple[str, Dict[str, bool]]:
diff --git a/frontend_tests/node_tests/templates.js b/frontend_tests/node_tests/templates.js --- a/frontend_tests/node_tests/templates.js +++ b/frontend_tests/node_tests/templates.js @@ -641,13 +641,6 @@ run_test('draft_table_body', () => { assert.equal(row_2.find(".message_content").text().trim(), "Private draft"); }); - -run_test('email_address_hint', () => { - var html = render('email_address_hint'); - var li = $(html).find("li").first(); - assert.equal(li.text(), 'translated: The email will be forwarded to this stream'); -}); - run_test('emoji_popover', () => { var args = { class: "emoji-info-popover", diff --git a/zerver/tests/test_email_mirror.py b/zerver/tests/test_email_mirror.py --- a/zerver/tests/test_email_mirror.py +++ b/zerver/tests/test_email_mirror.py @@ -69,16 +69,30 @@ def test_encode_decode(self) -> None: stream_name = 'dev. help' stream = ensure_stream(realm, stream_name) email_address = encode_email_address(stream) - self.assertTrue(email_address.startswith('dev-help')) - self.assertTrue(email_address.endswith('@testserver')) + self.assertEqual(email_address, "dev-help.{}@testserver".format(stream.email_token)) + + # The default form of the email address (with an option - "include-footer"): + token, options = decode_email_address( + "dev-help.{}.include-footer@testserver".format(stream.email_token) + ) + self._assert_options(options, include_footer=True) + self.assertEqual(token, stream.email_token) + + # Using + instead of . as the separator is also supported for backwards compatibility, + # since that was the original form of addresses that we used: + token, options = decode_email_address( + "dev-help+{}+include-footer@testserver".format(stream.email_token) + ) + self._assert_options(options, include_footer=True) + self.assertEqual(token, stream.email_token) + token, options = decode_email_address(email_address) self._assert_options(options) self.assertEqual(token, stream.email_token) - parts = email_address.split('@') - # Use a mix of + and . as separators, to test that it works: - parts[0] += "+include-footer.show-sender+include-quotes" - email_address_all_options = '@'.join(parts) + # We also handle mixing + and . but it shouldn't be recommended to users. + email_address_all_options = "dev-help.{}+include-footer.show-sender+include-quotes@testserver" + email_address_all_options = email_address_all_options.format(stream.email_token) token, options = decode_email_address(email_address_all_options) self._assert_options(options, show_sender=True, include_footer=True, include_quotes=True) self.assertEqual(token, stream.email_token) @@ -136,6 +150,14 @@ def test_decode_ignores_stream_name(self) -> None: token = decode_email_address(stream_to_address)[0] self.assertEqual(token, stream.email_token) + def test_encode_with_show_sender(self) -> None: + stream = get_stream("Denmark", get_realm("zulip")) + stream_to_address = encode_email_address(stream, show_sender=True) + + token, options = decode_email_address(stream_to_address) + self._assert_options(options, show_sender=True) + self.assertEqual(token, stream.email_token) + class TestGetMissedMessageToken(ZulipTestCase): def test_get_missed_message_token(self) -> None: with self.settings(EMAIL_GATEWAY_PATTERN="%[email protected]"):
stream settings: Use the .show-sender version of email address. In stream settings, we should * show `announce.b64ccc11942283fe612c93a0c57af830.show-sender@zulipdev.com` instead of `[email protected]` * The (?) should just be a `target=_blank` link to /help/message-a-stream-by-email. You can copy how we do this in other places, e.g. to the right of Organization profile at http://localhost:9991/#organization/organization-profile). Here's what it looks like now: ![image](https://user-images.githubusercontent.com/890911/64287477-aec5b980-cf14-11e9-9975-959f29ade08c.png)
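For context, the address shown there is just the stream's email token plus an options suffix run through the server's gateway pattern. Below is a minimal sketch of that rule, matching the helper changed in the patch above but ignoring the stream-name sanitization the real code also does; the pattern value is an example, not a real deployment's setting.

```python
EMAIL_GATEWAY_PATTERN = "%[email protected]"

def encode_stream_email(stream_name: str, token: str, show_sender: bool = False) -> str:
    # Options such as "show-sender" are appended to the token part with ".".
    encoded = "{}.{}".format(stream_name, token)
    if show_sender:
        encoded += ".show-sender"
    return EMAIL_GATEWAY_PATTERN % (encoded,)

print(encode_stream_email("announce", "b64ccc11942283fe612c93a0c57af830", show_sender=True))
# [email protected]
```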
Hello @zulip/server-streams members, this issue was labeled with the "area: stream settings" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim
2019-09-05T10:28:58
zulip/zulip
13,283
zulip__zulip-13283
[ "13277" ]
dfd9ace7fa30231d11b2e84f67a5e225725ffaf0
diff --git a/zerver/views/streams.py b/zerver/views/streams.py --- a/zerver/views/streams.py +++ b/zerver/views/streams.py @@ -257,7 +257,7 @@ def remove_subscriptions_backend( else: people_to_unsub = set([user_profile]) - result = dict(removed=[], not_subscribed=[]) # type: Dict[str, List[str]] + result = dict(removed=[], not_removed=[]) # type: Dict[str, List[str]] (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams, request.client, acting_user=user_profile) @@ -265,7 +265,7 @@ def remove_subscriptions_backend( for (subscriber, removed_stream) in removed: result["removed"].append(removed_stream.name) for (subscriber, not_subscribed_stream) in not_subscribed: - result["not_subscribed"].append(not_subscribed_stream.name) + result["not_removed"].append(not_subscribed_stream.name) return json_success(result)
diff --git a/zerver/tests/test_docs.py b/zerver/tests/test_docs.py --- a/zerver/tests/test_docs.py +++ b/zerver/tests/test_docs.py @@ -126,7 +126,7 @@ def test_doc_endpoints(self) -> None: self._test('/api/get-profile', 'takes no arguments') self._test('/api/add-subscriptions', 'authorization_errors_fatal') self._test('/api/create-user', 'zuliprc-admin') - self._test('/api/remove-subscriptions', 'not_subscribed') + self._test('/api/remove-subscriptions', 'not_removed') self._test('/team/', 'industry veterans') self._test('/history/', 'Cambridge, Massachusetts') # Test the i18n version of one of these pages. diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py --- a/zerver/tests/test_subs.py +++ b/zerver/tests/test_subs.py @@ -991,7 +991,7 @@ def test_admin_remove_others_from_public_stream(self) -> None: other_user_subbed=True) json = self.assert_json_success(result) self.assertEqual(len(json["removed"]), 1) - self.assertEqual(len(json["not_subscribed"]), 0) + self.assertEqual(len(json["not_removed"]), 0) def test_admin_remove_others_from_subbed_private_stream(self) -> None: """ @@ -1003,7 +1003,7 @@ def test_admin_remove_others_from_subbed_private_stream(self) -> None: other_user_subbed=True) json = self.assert_json_success(result) self.assertEqual(len(json["removed"]), 1) - self.assertEqual(len(json["not_subscribed"]), 0) + self.assertEqual(len(json["not_removed"]), 0) def test_admin_remove_others_from_unsubbed_private_stream(self) -> None: """ @@ -1015,7 +1015,7 @@ def test_admin_remove_others_from_unsubbed_private_stream(self) -> None: other_user_subbed=True, other_sub_users=[self.example_user("othello")]) json = self.assert_json_success(result) self.assertEqual(len(json["removed"]), 1) - self.assertEqual(len(json["not_subscribed"]), 0) + self.assertEqual(len(json["not_removed"]), 0) def test_create_stream_policy_setting(self) -> None: """ @@ -1155,7 +1155,7 @@ def test_remove_already_not_subbed(self) -> None: other_user_subbed=False) json = self.assert_json_success(result) self.assertEqual(len(json["removed"]), 0) - self.assertEqual(len(json["not_subscribed"]), 1) + self.assertEqual(len(json["not_removed"]), 1) def test_remove_invalid_user(self) -> None: """ @@ -2839,7 +2839,7 @@ def helper_check_subs_before_and_after_remove(self, subscriptions: List[str], {"msg": "", "removed": ["Denmark", "Scotland", "Verona"], - "not_subscribed": ["Rome"], "result": "success"} + "not_removed": ["Rome"], "result": "success"} """ result = self.client_delete("/json/users/me/subscriptions", {"subscriptions": ujson.dumps(subscriptions)}) @@ -2868,7 +2868,7 @@ def test_successful_subscriptions_remove(self) -> None: try_to_remove = not_subbed[:3] # attempt to remove up to 3 streams not already subbed to streams_to_remove.extend(try_to_remove) self.helper_check_subs_before_and_after_remove(streams_to_remove, - {"removed": self.streams[1:], "not_subscribed": try_to_remove}, + {"removed": self.streams[1:], "not_removed": try_to_remove}, self.test_email, [self.streams[0]], self.test_realm) def test_subscriptions_remove_fake_stream(self) -> None:
Rename not_subscribed key to not_removed in users/me/subscriptions DELETE response
The response that is generated when someone tries to remove a user from a stream they are not subscribed to looks like this:

```
{
    "msg": "",
    "not_subscribed": [
        "new stream"
    ],
    "removed": [],
    "result": "success"
}
```

`not_subscribed` should be renamed to `not_removed`, since that makes more sense.

http://zulip.zulipdev.com:9991/api/remove-subscriptions

This should be an easy issue to work on.
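For anyone exercising this endpoint while the key is being renamed, here is a hedged example call; the server URL and credentials are placeholders, and the printed response follows the shape above (with `not_subscribed` becoming `not_removed` after this change).

```python
import json

import requests

# DELETE /api/v1/users/me/subscriptions with a JSON-encoded list of stream names.
resp = requests.delete(
    "https://example.zulipchat.com/api/v1/users/me/subscriptions",
    auth=("[email protected]", "BOT_API_KEY"),
    data={"subscriptions": json.dumps(["new stream"])},
)
print(resp.json())
```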
Hello @zulip/server-api members, this issue was labeled with the "area: api", "area: documentation (api and integrations)" labels, so you may want to check it out!

<!-- areaLabelAddition -->
Hmm, yeah, good catch. I guess one can tell by the fact we're just discovering this that nothing is actually processing that string, so we should aim to fix this soon so that API callers that do eventually need this API don't have to deal with the old version.
@zulipbot assign

oh, I forgot... :P

@zulipbot claim
2019-10-13T03:51:31
zulip/zulip
13,435
zulip__zulip-13435
[ "12502" ]
89ff62dafa9dbc47d935577d7b176e3d1634b7f6
diff --git a/zerver/lib/events.py b/zerver/lib/events.py --- a/zerver/lib/events.py +++ b/zerver/lib/events.py @@ -145,10 +145,6 @@ def always_want(msg_type: str) -> bool: info for every event type. Defining this at module level makes it easier to mock. ''' - if settings.PRODUCTION and msg_type == "recent_private_conversations": # nocoverage - # Temporary: Don't include recent_private_conversations in production - # by default while the feature is still experimental. - return False return True # Fetch initial data. When event_types is not specified, clients want
diff --git a/frontend_tests/node_tests/pm_conversations.js b/frontend_tests/node_tests/pm_conversations.js --- a/frontend_tests/node_tests/pm_conversations.js +++ b/frontend_tests/node_tests/pm_conversations.js @@ -13,18 +13,44 @@ run_test('partners', () => { assert.equal(pmc.is_partner(user3_id), true); }); +zrequire("people"); + run_test('insert_recent_private_message', () => { + set_global('page_params', { + recent_private_conversations: [ + {user_ids: [1, 2], + max_message_id: 150, + }, + {user_ids: [1], + max_message_id: 111, + }, + {user_ids: [], + max_message_id: 7, + }, + ], + }); + people.initialize_current_user(15); + pmc.recent.initialize(); + + assert.deepEqual(pmc.recent.get(), [ + {user_ids_string: '1,2', max_message_id: 150}, + {user_ids_string: '1', max_message_id: 111}, + {user_ids_string: '15', max_message_id: 7}, + ]); + pmc.recent.insert('1', 1001); pmc.recent.insert('2', 2001); pmc.recent.insert('1', 3001); - // try to backdate user1's timestamp + // try to backdate user1's latest message pmc.recent.insert('1', 555); assert.deepEqual(pmc.recent.get(), [ - {user_ids_string: '1', timestamp: 3001}, - {user_ids_string: '2', timestamp: 2001}, + {user_ids_string: '1', max_message_id: 3001}, + {user_ids_string: '2', max_message_id: 2001}, + {user_ids_string: '1,2', max_message_id: 150}, + {user_ids_string: '15', max_message_id: 7}, ]); - assert.deepEqual(pmc.recent.get_strings(), ['1', '2']); + assert.deepEqual(pmc.recent.get_strings(), ['1', '2', '1,2', '15']); }); diff --git a/frontend_tests/node_tests/ui_init.js b/frontend_tests/node_tests/ui_init.js --- a/frontend_tests/node_tests/ui_init.js +++ b/frontend_tests/node_tests/ui_init.js @@ -103,6 +103,7 @@ zrequire('search'); zrequire('tutorial'); zrequire('notifications'); zrequire('pointer'); +zrequire('pm_conversations'); zrequire('compose_fade'); zrequire('pm_list'); zrequire('list_cursor'); @@ -140,6 +141,7 @@ page_params.unsubscribed = []; page_params.never_subscribed = []; page_params.realm_notifications_stream_id = -1; page_params.unread_msgs = {}; +page_params.recent_private_conversations = []; $('#tab_bar').append = () => {}; $('#compose').filedrop = () => {};
Use new optimized data set for populating "private messages" in left sidebar Since we merged https://github.com/zulip/zulip/pull/11946, it should now be easy to significantly improve the data set used for rendering of the "private messages" by using the same ~1000 recent PMs of history data set available in `/register` (`page_params` in the webapp) that we made available for mobile. We'll need to remove this temporary block from `always_want` in `zerver/lib/events.py`: `settings.PRODUCTION and msg_type == "recent_private_conversations":` to make this work. And then the work should be just accessing the `recent_private_conversations` attribute in `page_params` and using that to initialize our `static/js/pm_conversations.js` library.
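For reference, the shape of that payload (taken from the node test added in this PR) looks like the following; it is shown as Python literals purely for illustration, since the webapp actually reads it from `page_params` in JavaScript.

```python
# Each entry lists the other participants of a private-message thread and the
# newest message id in it; an empty user_ids list means a PM to yourself.
recent_private_conversations = [
    {"user_ids": [1, 2], "max_message_id": 150},
    {"user_ids": [1], "max_message_id": 111},
    {"user_ids": [], "max_message_id": 7},
]
# The client keys each conversation by a comma-joined id string ("1,2", "1", ...),
# substituting the current user's own id for the self-PM case.
```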
Hello @zulip/server-sidebars members, this issue was labeled with the "area: left-sidebar" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim Hello @vinitS101, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon! @zulipbot working on this rn. @vinitS101 just wanted to check in on this -- did you make progress or should I find someone else to work on this? I wasn't able to make any substantial progress on this issue. My apologies for not bringing this up before. OK. @YashRE42 if you're looking for a high priority data-centric frontend issue, this could be a good thing to claim. @zulipbot claim Welcome to Zulip, @hashirsarwar! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](https://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)!
2019-11-21T20:08:00
zulip/zulip
13,479
zulip__zulip-13479
[ "13464" ]
ce474ee8cf1df880b374eb044d0ed59237c8b7e9
diff --git a/scripts/setup/generate_secrets.py b/scripts/setup/generate_secrets.py --- a/scripts/setup/generate_secrets.py +++ b/scripts/setup/generate_secrets.py @@ -26,7 +26,6 @@ AUTOGENERATED_SETTINGS = [ 'avatar_salt', 'initial_password_salt', - 'local_database_password', 'rabbitmq_password', 'shared_secret', 'thumbor_key', @@ -85,6 +84,9 @@ def add_secret(name, value): if need_secret(name): add_secret(name, generate_random_token(64)) + if development and need_secret("local_database_password"): + add_secret("local_database_password", generate_random_token(64)) + if need_secret('secret_key'): add_secret('secret_key', generate_django_secretkey()) diff --git a/zproject/config.py b/zproject/config.py --- a/zproject/config.py +++ b/zproject/config.py @@ -28,9 +28,7 @@ def get_secret(key: str, default_value: Optional[str]=None, development_only: bool=False) -> Optional[str]: if development_only and PRODUCTION: return default_value - if secrets_file.has_option('secrets', key): - return secrets_file.get('secrets', key) - return default_value + return secrets_file.get('secrets', key, fallback=default_value) @overload def get_config(section: str, key: str, default_value: str) -> str: @@ -39,9 +37,7 @@ def get_config(section: str, key: str, default_value: str) -> str: def get_config(section: str, key: str, default_value: Optional[str]=None) -> Optional[str]: ... def get_config(section: str, key: str, default_value: Optional[str]=None) -> Optional[str]: - if config_file.has_option(section, key): - return config_file.get(section, key) - return default_value + return config_file.get(section, key, fallback=default_value) def get_from_file_if_exists(path: str) -> str: if os.path.exists(path):
Don't generate useless local_database_password secret in production The `local_database_password` secret is only used in the Zulip development environment, and it's a bug that `generate_secrets.py` generates it in production. We should ensure it doesn't get generated there for new installations, since it's confusing. It might also be helpful to make `generate_secrets.py` remove or comment it on upgrade, though I suppose that carries some risk if the user has written their own code to access that setting. @andersk @mateuszmandera FYI
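A related cleanup in the same patch replaces the manual `has_option` checks with configparser's `fallback` parameter; below is a standalone sketch of that idiom (the file path and parser class are shown for illustration and may differ from the deployment's actual setup).

```python
import configparser
from typing import Optional

secrets_file = configparser.RawConfigParser()
secrets_file.read("/etc/zulip/zulip-secrets.conf")

def get_secret(key: str, default_value: Optional[str] = None) -> Optional[str]:
    # fallback= returns the default instead of raising NoSectionError/NoOptionError.
    return secrets_file.get("secrets", key, fallback=default_value)
```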
Hello @zulip/server-production members, this issue was labeled with the "area: production installer" label, so you may want to check it out! <!-- areaLabelAddition -->
2019-12-05T04:55:58
zulip/zulip
13,482
zulip__zulip-13482
[ "13477" ]
1465628c95871fb73da26321c515e008d68a334c
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1440,17 +1440,13 @@ def __init__(self, parser: Any) -> None: parser.markdown.tab_length = 4 class UListProcessor(sane_lists.SaneUListProcessor): - """ Does not accept '+' or '-' as a bullet character. """ + """ Unordered lists, but with 2-space indent """ def __init__(self, parser: Any) -> None: parser.markdown.tab_length = 2 super().__init__(parser) parser.markdown.tab_length = 4 - self.RE = re.compile('^[ ]{0,%d}[*][ ]+(.*)' % (self.tab_length - 1,)) - self.CHILD_RE = re.compile(r'^[ ]{0,%d}(([*]))[ ]+(.*)' % - (self.tab_length - 1,)) - class ListIndentProcessor(markdown.blockprocessors.ListIndentProcessor): """ Process unordered list blocks. @@ -1502,7 +1498,7 @@ class BugdownListPreprocessor(markdown.preprocessors.Preprocessor): directly after a line of text, and inserts a newline between to satisfy Markdown""" - LI_RE = re.compile(r'^[ ]{0,3}(\*|\d\.)[ ]+(.*)', re.MULTILINE) + LI_RE = re.compile(r'^[ ]{0,3}([*+-]|\d\.)[ ]+(.*)', re.MULTILINE) def run(self, lines: List[str]) -> List[str]: """ Insert a newline between a paragraph and ulist if missing """
diff --git a/zerver/tests/fixtures/markdown_test_cases.json b/zerver/tests/fixtures/markdown_test_cases.json --- a/zerver/tests/fixtures/markdown_test_cases.json +++ b/zerver/tests/fixtures/markdown_test_cases.json @@ -156,11 +156,30 @@ "expected_output": "<p>Hello [world][ref-name]</p>\n<p>[ref-name]: <a href=\"https://google.com\" target=\"_blank\" title=\"https://google.com\">https://google.com</a></p>" }, { - "name": "ulist_standard", + "name": "ulist_standard_1", "input": "Some text with a list:\n\n* One item\n* Two items\n* Three items", "expected_output": "<p>Some text with a list:</p>\n<ul>\n<li>One item</li>\n<li>Two items</li>\n<li>Three items</li>\n</ul>", "text_content": "Some text with a list:\n\nOne item\nTwo items\nThree items\n" }, + { + "name": "ulist_standard_2", + "input": "Some text with a list:\n\n- One item\n- Two items\n- Three items", + "expected_output": "<p>Some text with a list:</p>\n<ul>\n<li>One item</li>\n<li>Two items</li>\n<li>Three items</li>\n</ul>", + "text_content": "Some text with a list:\n\nOne item\nTwo items\nThree items\n" + }, + { + "name": "ulist_standard_3", + "input": "Some text with a list:\n\n+ One item\n+ Two items\n+ Three items", + "expected_output": "<p>Some text with a list:</p>\n<ul>\n<li>One item</li>\n<li>Two items</li>\n<li>Three items</li>\n</ul>", + "text_content": "Some text with a list:\n\nOne item\nTwo items\nThree items\n" + }, + { + "name": "ulist_mixed_bullets", + "input": "Combine four lists:\n\n* One\n- Two\n+ Three\n- Four\n- Five", + "expected_output": "<p>Combine four lists:</p>\n<ul>\n<li>One</li>\n<li>Two</li>\n<li>Three</li>\n<li>Four</li>\n<li>Five</li>\n</ul>", + "marked_expected_output": "<p>Combine four lists:</p>\n<ul>\n<li>One</li>\n</ul>\n<ul>\n<li>Two</li>\n</ul>\n<ul>\n<li>Three</li>\n</ul>\n<ul>\n<li>Four</li>\n<li>Five</li>\n</ul>", + "text_content": "Combine four lists:\n\nOne\nTwo\nThree\nFour\nFive\n" + }, { "name": "ulist_hanging", "input": "Some text with a hanging list:\n* One item\n* Two items\n* Three items",
Bulleted lists with `-` are broken in the backend markdown processor The reproducer is simple: ``` You should be able to do this: - list of things - more things ``` @tommyip @aero31aero FYI. This could have been caused by our recent work on numbered lists?
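A quick way to sanity-check the fix: the list preprocessor's bullet regex now accepts all three bullet characters. The regex below is copied from the patch; the surrounding harness is illustrative.

```python
import re

# From BugdownListPreprocessor after the fix: '*', '+' and '-' all count as bullets.
LI_RE = re.compile(r'^[ ]{0,3}([*+-]|\d\.)[ ]+(.*)', re.MULTILINE)

text = "You should be able to do this:\n- list of things\n- more things"
print(LI_RE.findall(text))
# [('-', 'list of things'), ('-', 'more things')]
```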
Hello @zulip/server-markdown members, this issue was labeled with the "area: markdown" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim
2019-12-06T07:25:51
zulip/zulip
13,485
zulip__zulip-13485
[ "13481" ]
ce474ee8cf1df880b374eb044d0ed59237c8b7e9
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -26,4 +26,4 @@ # historical commits sharing the same major version, in which case a # minor version bump suffices. -PROVISION_VERSION = '66.1' +PROVISION_VERSION = '66.2'
Mentions of names with non-ASCII characters Some people I want to mention quite often have non-ASCII chars in their names, e.g. "Ståle" (see the little circle above A?) When typing the mention, I'd start with `@St` but when I type the A, the name gets missing from the list, as `'å' != 'a'`. I suggest that Zulip should internally normalize the names to ASCII at least for the mention suggestions (you'd probably have to create manually the unicode -> ascii mapping table based on glyph similarity).
Hello @zulip/server-compose, @zulip/server-i18n, @zulip/server-markdown members, this issue was labeled with the "area: compose", "area: i18n", "area: markdown" labels, so you may want to check it out! <!-- areaLabelAddition --> Well, the bug here is that our logic to remove diacritics does not handle `å`. ``` function query_matches_string(query, source_str, split_char) { source_str = people.remove_diacritics(source_str); query = people.remove_diacritics(query); // When `abc ` with a space at the end is typed in a // contenteditable widget such as the composebox PM section, the // space at the end was a `no break-space (U+00A0)` instead of // `space (U+0020)`, which lead to no matches in those cases. query = query.replace(/\u00A0/g, String.fromCharCode(32)); // If query doesn't contain a separator, we just want an exact // match where query is a substring of one of the target characters. ``` ``` // Diacritic removal from: // https://stackoverflow.com/questions/18236208/perform-a-find-match-with-javascript-ignoring-special-language-characters-acce const diacritic_regexes = { letters_only: /^[a-z]+$/, a: /[áàãâä]/g, e: /[éèëê]/g, i: /[íìïî]/g, o: /[óòöôõ]/g, u: /[úùüû]/g, c: /[ç]/g, n: /[ñ]/g, }; ``` We should be doing something better here. Maybe: * https://www.npmjs.com/package/diacritics or https://www.npmjs.com/package/remove-accents. * https://stackoverflow.com/questions/990904/remove-accents-diacritics-in-a-string-in-javascript * Possibly something involving unicode normalization, though that might have a performance impact. Probably easily solvable via adding a hash table cache for `people.remove_diacritics` if relevant. @andersk FYI as our resident unicode expert :) My suggestion here is the one I gave in https://github.com/zulip/zulip/pull/13198#issuecomment-533274408: > I think a more correct way to “strip diacritics” would be to normalize to NFKD and remove all characters of general category Mark: > > ```js > s.normalize("NFKD").replace(/\p{M}/gu, "") > ``` > > For today’s browsers, we’d need to turn on `@babel/plugin-proposal-unicode-property-regex`, which expands that to > > ```js > 
s.normalize("NFKD").replace(/(?:[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u07FD\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08D3-\u08E1\u08E3-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u09FE\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0AFA-\u0AFF\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C04\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D00-\u0D03\u0D3B\u0D3C\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u1885\u1886\u18A9\u1920-\u192B\u1930-\u193B\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF4\u1CF7-\u1CF9\u1DC0-\u1DF9\u1DFB-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69E\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C5\uA8E0-\uA8F1\uA8FF\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2F]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD803[\uDD24-\uDD27\uDF46-\uDF50]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD45\uDD46\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDDC9-\uDDCC\uDE2C-\uDE37\uDE3E\uDEDF-\uDEEA\uDF00-\uDF03\uDF3B\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDC35-\uDC46\uDC5E\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDDDC\uDDDD\uDE30-\uDE40\uDEAB-\uDEB7\uDF1D-\uDF2B]|\uD806[\uDC2C-\uDC3A\uDDD1-\uDDD7\uDDDA-\uDDE0\uDDE4\uDE01-\uDE0A\uDE33-\uDE39\uDE3B-\uDE3E\uDE47\uDE51-\uDE5B\uDE8A-\uDE99]|\uD807[\uDC2F-\uDC36\uDC38-\uDC3F\uDC92-\uDCA7\uDCA9-\uDCB6\uDD31-\uDD36\uDD3A\uDD3C\uDD3D\uDD3F-\uDD45\uDD47\uDD8A-\uDD8E\uDD90\uDD91\uDD93-\uDD97\uDEF3-\uDEF6]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF4F\uDF51-\uDF87\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD836[\uDE00-\uDE36\uDE3B-\uDE6C\uDE75\uDE84\uDE9B-\uDE9F\uDEA1-\uDEAF]|\uD838[\uDC00-\uDC06\uDC08-\uDC18\uDC1B-\uDC21\uDC23\uDC24\uDC26-\uDC2A\uDD30-\uDD36\uDEEC-\uDEEF]|\uD83A[\uDCD0-\uDCD6\uDD44-\uDD4A]|\uDB40[\uDD00-\uDDEF])/g, "") > ``` @aero31aero can you open a PR doing that? We can test how mention typeahead performance is with that many users to see if the normalization thing is slow. 
It may very well not be. If it is, I think it'd likely suffice to cache the results of NFKD normalization via a simple dictionary cache in `remove_diacritics` to avoid recomputing these things.
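To make that caching suggestion concrete, here is a sketch in Python (the real change would live in the webapp's `people` module in JavaScript; the function name mirrors the existing helper, but the cache shape is an assumption).

```python
import unicodedata

_diacritic_cache = {}

def remove_diacritics(s: str) -> str:
    # NFKD-decompose, drop combining marks, and memoize per input string so the
    # typeahead does not re-normalize the same names on every keystroke.
    if s not in _diacritic_cache:
        decomposed = unicodedata.normalize("NFKD", s)
        _diacritic_cache[s] = "".join(
            ch for ch in decomposed if not unicodedata.combining(ch)
        )
    return _diacritic_cache[s]

assert remove_diacritics("Ståle") == "Stale"
```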
2019-12-07T01:56:13
zulip/zulip
13,529
zulip__zulip-13529
[ "13528" ]
9812c6d445220c1d799ec5b856c336198ed00e30
diff --git a/zerver/migrations/0209_user_profile_no_empty_password.py b/zerver/migrations/0209_user_profile_no_empty_password.py --- a/zerver/migrations/0209_user_profile_no_empty_password.py +++ b/zerver/migrations/0209_user_profile_no_empty_password.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.conf import settings -from django.contrib.auth import get_backends from django.db import migrations from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps @@ -13,7 +12,6 @@ from zerver.lib.cache import cache_delete, user_profile_by_api_key_cache_key from zerver.lib.queue import queue_json_publish from zerver.lib.utils import generate_api_key -from zproject.backends import EmailAuthBackend from typing import Any, Set, Union @@ -159,8 +157,7 @@ def write_realm_audit_log_entry(user_profile: Any, # If Zulip's built-in password authentication is not enabled on # the server level, then we plan to skip resetting any users' API # keys, since the bug requires EmailAuthBackend. - email_auth_enabled = any(isinstance(backend, EmailAuthBackend) - for backend in get_backends()) + email_auth_enabled = 'zproject.backends.EmailAuthBackend' in settings.AUTHENTICATION_BACKENDS # A quick note: This query could in theory exclude users with # is_active=False, is_bot=True, or realm__deactivated=True here to
Upgrade to 2.1 fails due to LDAP settings migration Attempted upgrading from 2.0.1 to 2.1. Same issue as #13527 (even though he seems to have managed to install it), but more elaborate issue detailing. **Upgrade Log** ``` root@x-Zulip:/home/x# /home/zulip/deployments/current/scripts/upgrade-zulip zulip-server-latest.tar.gz 2019-12-13 12:33:36,393 upgrade-zulip: Archiving the tarball under /home/zulip/archives 2019-12-13 12:33:37,140 upgrade-zulip: Unpacking the tarball 2019-12-13 12:33:39,133 upgrade-zulip-stage-2: Upgrading system packages... [...] Extracting to /srv/zulip-yarn... Adding to /usr/bin Successfully installed Yarn 1.19.1! Generated Camo config file /etc/default/camo generate_secrets: No new secrets to generate. 2019-12-13 12:34:42,379 upgrade-zulip-stage-2: Installing static assets... 2019-12-13 12:34:51,096 upgrade-zulip-stage-2: Checking for needed migrations process-fts-updates: stopped 2019-12-13 12:35:00,651 upgrade-zulip-stage-2: Stopping Zulip... zulip-django: stopped zulip-senders:zulip_events_message_sender-1: stopped zulip-senders:zulip_events_message_sender-3: stopped zulip-senders:zulip_events_message_sender-2: stopped zulip-senders:zulip_events_message_sender-0: stopped zulip-tornado: stopped zulip-thumbor: ERROR (not running) zulip-workers:zulip_deliver_enqueued_emails: stopped zulip-workers:zulip_deliver_scheduled_messages: stopped zulip-workers:zulip_events_missedmessage_mobile_notifications: stopped zulip-workers:zulip_events_user_presence: stopped zulip-workers:zulip_events_error_reports: stopped zulip-workers:zulip_events_missedmessage_emails: stopped zulip-workers:zulip_events_user_activity: stopped zulip-workers:zulip_events_email_senders: stopped zulip-workers:zulip_events_user_activity_interval: stopped zulip-workers:zulip_events_signups: stopped zulip-workers:zulip_events_slow_queries: stopped zulip-workers:zulip_events_embed_links: stopped zulip-workers:zulip_events_deferred_work: stopped zulip-workers:zulip_events_embedded_bots: stopped zulip-workers:zulip_events_missedmessage_email_senders: stopped zulip-workers:zulip_events_digest_emails: stopped zulip-workers:zulip_events_invites: stopped zulip-workers:zulip_events_email_mirror: stopped zulip-workers:zulip_events_feedback_messages: stopped zulip-workers:zulip_events_outgoing_webhooks: stopped 2019-12-13 12:35:36,197 upgrade-zulip-stage-2: Applying puppet changes... 
Notice: Compiled catalog for x-zulip in environment production in 1.76 seconds Notice: /Stage[main]/Zulip::Apt_repository/Exec[setup_apt_repo]/returns: executed successfully Notice: /Stage[main]/Zulip::Nginx/File[/etc/nginx/zulip-include/api_headers]/content: content changed '{md5}eface8f535c27ab843e0a387fb0b6a7f' to '{md5}e7bac40e48c557daafbb26ade24d37ec' Notice: /Stage[main]/Zulip::Nginx/File[/etc/nginx/zulip-include/uploads.route]/content: content changed '{md5}e78055c8a0fbf9a352fc903e9031bbd8' to '{md5}60b80db54086113bfb1e1c19a756f59a' Notice: /Stage[main]/Zulip::Nginx/File[/etc/nginx/dhparam.pem]/ensure: defined content as '{md5}4328d5f118ce830b50784cc5a8a07ea9' Notice: /Stage[main]/Zulip::Nginx/File[/etc/nginx/nginx.conf]/content: content changed '{md5}cf86eb308a50a4b68c7421c483cea259' to '{md5}1f191db6216c5cf99c704378cb80e0cf' Notice: /Stage[main]/Zulip::Nginx/File[/var/log/nginx]/owner: owner changed 'root' to 'zulip' Notice: /Stage[main]/Zulip::Nginx/File[/var/log/nginx]/mode: mode changed '0755' to '0750' Notice: /Stage[main]/Zulip::App_frontend_base/File[/etc/nginx/zulip-include/app]/content: content changed '{md5}22a9f5783fcf0c2de4732c670fe2d1fc' to '{md5}c5cddab158cd46e9d53c33a937725f53' Notice: /Stage[main]/Zulip::App_frontend_base/File[/etc/nginx/zulip-include/uploads.types]/content: content changed '{md5}8c4dd86c0bddb31f6007b0d8b7e4d1e9' to '{md5}ff16edc22bb798754628e5ae322ee895' Notice: /Stage[main]/Zulip::App_frontend_base/File[/etc/zulip/uwsgi.ini]/content: content changed '{md5}5b20133a7d6aef1460e18153eb9097bc' to '{md5}3f8ef89647983a1a54c12525f3cce0bd' Notice: /Stage[main]/Zulip::App_frontend_base/File[/usr/lib/nagios/plugins/zulip_app_frontend/check_cron_file]/content: content changed '{md5}823420f263f2b8b5287bf2da9a71ec79' to '{md5}662b779127a8500eafaf7e0abad81aec' Notice: /Stage[main]/Zulip::App_frontend_base/File[/usr/lib/nagios/plugins/zulip_app_frontend/check_rabbitmq_consumers]/content: content changed '{md5}f73299d1ce1b1b80412562d1d92af33d' to '{md5}072ece52e8901790043f1e63e5969871' Notice: /Stage[main]/Zulip::App_frontend_base/File[/usr/lib/nagios/plugins/zulip_app_frontend/check_send_receive_time]/content: content changed '{md5}12f364b7b071eeaab9b24727a1787fcf' to '{md5}5a5e6f4170652184f7825db677735800' Notice: /Stage[main]/Zulip::App_frontend_once/File[/etc/cron.d/send-digest-emails]/content: content changed '{md5}339d19cd1c1f10721bd90a5507148ce6' to '{md5}6a483bafb63f5588e673faf2f4bf2c01' Notice: /Stage[main]/Zulip::App_frontend_once/File[/etc/cron.d/update-analytics-counts]/content: content changed '{md5}da37aff132764038e73a6ee3914c696b' to '{md5}f337454a2a63125ceaa1d5985a40b784' Notice: /Stage[main]/Zulip::App_frontend/File[/etc/nginx/sites-available/zulip-enterprise]/content: content changed '{md5}e6814e1bc9711cb25ac302391eed5b2e' to '{md5}bbc2679b3e2e3e3152d42d1a03056b21' Notice: /Stage[main]/Zulip::Postgres_common/File[/usr/lib/nagios/plugins/zulip_postgres_common/check_postgres_backup]/content: content changed '{md5}51a507afbbcdfc8ddfcaeea6c1a908dd' to '{md5}f8b555d2be6d09cdadaae096f307e939' Notice: /Stage[main]/Zulip::Process_fts_updates/File[/usr/local/bin/process_fts_updates]/content: content changed '{md5}3c85531681133a1270b46ee9b732f7a0' to '{md5}eccded7052f77f8dd717326ea5e7b5c1' Notice: /Stage[main]/Zulip::Supervisor/Service[supervisor]: Triggered 'refresh' from 1 event Notice: /Stage[main]/Zulip::Nginx/Service[nginx]: Triggered 'refresh' from 7 events Notice: /Stage[main]/Zulip::Postgres_common/File[/usr/local/bin/pg_backup_and_purge]/content: 
content changed '{md5}6eef29125435eef7c53a570d040828e2' to '{md5}4b8e2b5f0ab1795a62d84db88e8cade6' Notice: /Stage[main]/Zulip::Postgres_appdb_tuned/File[/etc/postgresql/10/main/postgresql.conf]/content: content changed '{md5}edf8ed3a79280005c200e537f0885f7c' to '{md5}c745ab2813a8e30ffcd709afbb7aba69' Notice: /Stage[main]/Zulip::Postgres_appdb_tuned/Exec[pg_ctlcluster 10 main restart]: Triggered 'refresh' from 1 event Notice: Applied catalog in 13.94 seconds [...] 2019-12-13 12:35:56,803 upgrade-zulip-stage-2: Applying database migrations... Operations to perform: Apply all migrations: analytics, auth, confirmation, contenttypes, otp_static, otp_totp, pgroonga, sessions, social_django, two_factor, zerver Running migrations: Applying otp_totp.0002_auto_20190420_0723... OK Applying two_factor.0006_phonedevice_key_default... OK Applying zerver.0209_stream_first_message_id... OK Applying zerver.0210_stream_first_message_id... OK Applying zerver.0211_add_users_field_to_scheduled_email... OK Applying zerver.0212_make_stream_email_token_unique... OK Applying zerver.0213_realm_digest_weekday... OK Applying zerver.0214_realm_invite_to_stream_policy... OK Applying zerver.0215_realm_avatar_changes_disabled... OK Applying zerver.0216_add_create_stream_policy... OK Applying zerver.0217_migrate_create_stream_policy... OK Applying zerver.0218_remove_create_stream_by_admins_only... OK Applying zerver.0219_toggle_realm_digest_emails_enabled_default... OK Applying zerver.0220_subscription_notification_settings... OK Applying zerver.0221_subscription_notifications_data_migration... OK Applying zerver.0222_userprofile_fluid_layout_width... OK Applying zerver.0223_rename_to_is_muted... OK Applying zerver.0224_alter_field_realm_video_chat_provider... OK Applying zerver.0225_archived_reaction_model... OK Applying zerver.0226_archived_submessage_model... OK Applying zerver.0227_inline_url_embed_preview_default_off... OK Applying zerver.0228_userprofile_demote_inactive_streams... OK Applying zerver.0229_stream_message_retention_days... OK Applying zerver.0230_rename_to_enable_stream_audible_notifications... OK Applying zerver.0231_add_archive_transaction_model... OK Applying zerver.0232_make_archive_transaction_field_not_nullable... OK Applying zerver.0233_userprofile_avatar_hash... OK Applying zerver.0234_add_external_account_custom_profile_field... OK Applying zerver.0235_userprofile_desktop_icon_count_display... OK Applying zerver.0236_remove_illegal_characters_email_full... OK Applying zerver.0237_rename_zulip_realm_to_zulipinternal... OK Applying zerver.0238_usermessage_bigint_id... OK Applying zerver.0239_usermessage_copy_id_to_bigint_id... OK Applying zerver.0240_usermessage_migrate_bigint_id_into_id... OK Applying zerver.0241_usermessage_bigint_id_migration_finalize... OK Applying zerver.0242_fix_bot_email_property... OK Applying zerver.0243_message_add_date_sent_column... OK Applying zerver.0244_message_copy_pub_date_to_date_sent... OK Applying zerver.0245_message_date_sent_finalize_part1... OK Applying zerver.0246_message_date_sent_finalize_part2... OK Applying zerver.0247_realmauditlog_event_type_to_int... OK Applying zerver.0248_userprofile_role_start... OK Applying zerver.0249_userprofile_role_finish... OK Applying zerver.0250_saml_auth... OK Applying zerver.0251_prereg_user_add_full_name... OK Applying zerver.0252_realm_user_group_edit_policy... OK Applying zerver.0253_userprofile_wildcard_mentions_notify... 
OK Applying zerver.0209_user_profile_no_empty_password...Traceback (most recent call last): File "./manage.py", line 46, in <module> execute_from_command_line(sys.argv) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line utility.execute() File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/core/management/__init__.py", line 356, in execute self.fetch_command(subcommand).run_from_argv(self.argv) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/core/management/base.py", line 283, in run_from_argv self.execute(*args, **cmd_options) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/core/management/base.py", line 330, in execute output = self.handle(*args, **options) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/core/management/commands/migrate.py", line 204, in handle fake_initial=fake_initial, File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/db/migrations/executor.py", line 115, in migrate state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/db/migrations/executor.py", line 145, in _migrate_all_forwards state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/db/migrations/executor.py", line 244, in apply_migration state = migration.apply(state, schema_editor) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/db/migrations/migration.py", line 129, in apply operation.database_forwards(self.app_label, schema_editor, old_state, project_state) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/db/migrations/operations/special.py", line 193, in database_forwards self.code(from_state.apps, schema_editor) File "/home/zulip/deployments/2019-12-13-13-33-37/zerver/migrations/0209_user_profile_no_empty_password.py", line 163, in ensure_no_empty_passwords for backend in get_backends()) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/contrib/auth/__init__.py", line 40, in get_backends return _get_backends(return_tuples=False) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/contrib/auth/__init__.py", line 29, in _get_backends backend = load_backend(backend_path) File "/home/zulip/deployments/2019-12-13-13-33-37/zulip-py3-venv/lib/python3.6/site-packages/django/contrib/auth/__init__.py", line 23, in load_backend return import_string(path)() File "/home/zulip/deployments/2019-12-13-13-33-37/zproject/backends.py", line 322, in __init__ check_ldap_config() File "/home/zulip/deployments/2019-12-13-13-33-37/zproject/backends.py", line 257, in check_ldap_config assert settings.AUTH_LDAP_USERNAME_ATTR and settings.AUTH_LDAP_REVERSE_EMAIL_SEARCH AssertionError Traceback (most recent call last): File "/home/zulip/deployments/2019-12-13-13-33-37/scripts/lib/upgrade-zulip-stage-2", line 188, in <module> subprocess.check_call(["./manage.py", "migrate", "--noinput"], preexec_fn=su_to_zulip) 
File "/usr/lib/python3.6/subprocess.py", line 311, in check_call raise CalledProcessError(retcode, cmd) subprocess.CalledProcessError: Command '['./manage.py', 'migrate', '--noinput']' returned non-zero exit status 1. Traceback (most recent call last): File "/home/zulip/deployments/current/scripts/lib/upgrade-zulip", line 58, in <module> + deploy_options) File "/usr/lib/python3.6/subprocess.py", line 311, in check_call raise CalledProcessError(retcode, cmd) subprocess.CalledProcessError: Command '['/home/zulip/deployments/2019-12-13-13-33-37/scripts/lib/upgrade-zulip-stage-2', '/home/zulip/deployments/2019-12-13-13-33-37']' returned non-zero exit status 1. root@x-Zulip:/home/x# ``` **Settings** ``` import ldap from django_auth_ldap.config import LDAPSearch, GroupOfNamesType # URI of your LDAP server. If set, LDAP is used to prepopulate a user's name in # Zulip. Example: "ldaps://ldap.example.com" AUTH_LDAP_SERVER_URI = "ldaps://ldapproxy.comsa.biz:636" #AUTH_LDAP_START_TLS = True # This DN will be used to bind to your server. If unset, anonymous # binds are performed. # # If set, you need to specify the password in zulip-secrets.conf , # as 'auth_ldap_bind_password'. AUTH_LDAP_BIND_DN = "cn=adquery,ou=serviceaccounts,dc=comsa,dc=loc" # Specify the search base and the property to filter on that corresponds to the # username. AUTH_LDAP_USER_SEARCH = LDAPSearch("DC=comsa,DC=loc", ldap.SCOPE_SUBTREE, "(&(objectClass=user)(sAMAccountName=%(user)s) \ (memberOf:1.2.840.113556.1.4.1941:=cn=Zulip,ou=APPs,ou=Security-Groups,dc=comsa,dc=loc))") # If the value of a user's "uid" (or similar) property is not their email # address, specify the domain to append here. LDAP_APPEND_DOMAIN = "" # type: Optional[str] # If username and email are two different LDAP attributes, specify the # attribute to get the user's email address from LDAP here. LDAP_EMAIL_ATTR = "mail" # type: Optional[str] # This map defines how to populate attributes of a Zulip user from LDAP. AUTH_LDAP_USER_ATTR_MAP = { # full_name is required; common values include "cn" or "displayName". "full_name": "cn", "userAccountControl": "userAccountControl", "custom_profile_field__mobil": "mobile", "custom_profile_field__durchwahl":"telephoneNumber" } ```
@alfonsrv You will have to configure the additional LDAP settings - see https://zulip.readthedocs.io/en/latest/overview/changelog.html ``Upgrade notes`` section. But indeed this behavior isn't right in that the migrations shouldn't be failing due to that misconfiguration, we'll have to fix that. As a workaround, you can disable the ldap backend in ``AUTHENTICATION_BACKENDS`` and then upgrade. Then you can re-enable. As mentioned, you'll have to configure the new settings for the backend to work, but the upgrade will go through. Or you can configure the new settings first, and then upgrade.
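Spelled out, the temporary workaround above would look roughly like this in `/etc/zulip/settings.py` (illustrative; re-enable the LDAP backend once the new `AUTH_LDAP_USERNAME_ATTR` / `AUTH_LDAP_REVERSE_EMAIL_SEARCH` settings from the upgrade notes are configured).

```python
AUTHENTICATION_BACKENDS = (
    'zproject.backends.EmailAuthBackend',
    # 'zproject.backends.ZulipLDAPAuthBackend',  # commented out only for the upgrade
)
```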
2019-12-13T17:13:06
zulip/zulip
13,542
zulip__zulip-13542
[ "13539" ]
998b52dd8cc77e7f4fef76dd38e7c7efd0365f95
diff --git a/zproject/backends.py b/zproject/backends.py --- a/zproject/backends.py +++ b/zproject/backends.py @@ -682,13 +682,13 @@ def get_or_build_user(self, username: str, if user_disabled_in_ldap: if user.is_active: logging.info("Deactivating user %s because they are disabled in LDAP." % - (user.email,)) + (user.delivery_email,)) do_deactivate_user(user) # Do an early return to avoid trying to sync additional data. return (user, built) elif not user.is_active: logging.info("Reactivating user %s because they are not disabled in LDAP." % - (user.email,)) + (user.delivery_email,)) do_reactivate_user(user) self.sync_avatar_from_ldap(user, ldap_user) @@ -717,14 +717,14 @@ def catch_ldap_error(signal: Signal, **kwargs: Any) -> None: def sync_user_from_ldap(user_profile: UserProfile, logger: logging.Logger) -> bool: backend = ZulipLDAPUserPopulator() try: - ldap_username = backend.django_to_ldap_username(user_profile.email) + ldap_username = backend.django_to_ldap_username(user_profile.delivery_email) except ZulipLDAPExceptionNoMatchingLDAPUser: if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS: do_deactivate_user(user_profile) - logger.info("Deactivated non-matching user: %s" % (user_profile.email,)) + logger.info("Deactivated non-matching user: %s" % (user_profile.delivery_email,)) return True elif user_profile.is_active: - logger.warning("Did not find %s in LDAP." % (user_profile.email,)) + logger.warning("Did not find %s in LDAP." % (user_profile.delivery_email,)) return False # What one would expect to see like to do here is just a call to @@ -744,7 +744,7 @@ def sync_user_from_ldap(user_profile: UserProfile, logger: logging.Logger) -> bo # making this flow possible in a more directly supported fashion. updated_user = ZulipLDAPUser(backend, ldap_username, realm=user_profile.realm).populate_user() if updated_user: - logger.info("Updated %s." % (user_profile.email,)) + logger.info("Updated %s." % (user_profile.delivery_email,)) return True raise PopulateUserLDAPError("populate_user unexpectedly returned {}".format(updated_user))
diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -28,6 +28,7 @@ do_invite_users, do_reactivate_realm, do_reactivate_user, + do_set_realm_property, ensure_stream, validate_email, ) @@ -2986,6 +2987,18 @@ def test_update_full_name(self) -> None: hamlet = self.example_user('hamlet') self.assertEqual(hamlet.full_name, 'New Name') + def test_update_with_hidden_emails(self) -> None: + hamlet = self.example_user('hamlet') + realm = get_realm("zulip") + do_set_realm_property(realm, 'email_address_visibility', Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS) + hamlet.refresh_from_db() + + self.change_ldap_user_attr('hamlet', 'cn', 'New Name') + self.perform_ldap_sync(hamlet) + + hamlet.refresh_from_db() + self.assertEqual(hamlet.full_name, 'New Name') + def test_update_split_full_name(self) -> None: self.change_ldap_user_attr('hamlet', 'cn', 'Name') self.change_ldap_user_attr('hamlet', 'sn', 'Full')
Hiding email addresses with ldap caused everyone to be locked out I am sorry to bother you again but I managed to lock myself and everyone out. I am running the current master branch due to needing realm admin perms based on ldap user groups. I flipped some switches in the Admin Section and now no one can log in. I am not sure which setting exactly caused this, but I remember hiding email addresses from other users. When I ran (and run) ldap sync I get this result: ``` 2019-12-15 16:50:48.895 INFO [zulip.sync_ldap_user_data] Starting update. 2019-12-15 16:50:48.954 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user10@<domain> 2019-12-15 16:50:48.994 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user15@<domain> 2019-12-15 16:50:49.034 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user16@<domain> 2019-12-15 16:50:49.067 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user17@<domain> 2019-12-15 16:50:49.103 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user18@<domain> 2019-12-15 16:50:49.138 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user19@<domain> 2019-12-15 16:50:49.172 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user11@<domain> 2019-12-15 16:50:49.206 INFO [zulip.sync_ldap_user_data] Deactivated non-matching user: user12@<domain> (and a bunch more) ``` Where <domain> is the main domain of the instance. The usernames are obviously not userXY. I am using cn and email from ldap to log in. query_ldap still works; I can retrieve the full name and email from the ldap using either cn or email.
Ideally I would like to fix this with psql. But I guess this is also some kind of bug that should not happen.
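Rather than raw psql, a hedged recovery sketch via the Django management shell; it assumes the stock `do_reactivate_user` helper, that the affected users live in the root-domain realm (empty string_id — adjust otherwise), and that the sync problem is fixed or `LDAP_DEACTIVATE_NON_MATCHING_USERS` is turned off first so the next sync does not immediately re-deactivate everyone:
```python
# Run inside `./manage.py shell` as the zulip user; illustrative only.
from zerver.lib.actions import do_reactivate_user
from zerver.models import UserProfile, get_realm

realm = get_realm("")  # "" = root-domain realm; use your subdomain's string_id otherwise
for user in UserProfile.objects.filter(realm=realm, is_active=False, is_bot=False):
    do_reactivate_user(user)
```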
2019-12-16T01:14:05
zulip/zulip
13,666
zulip__zulip-13666
[ "13416" ]
30ee0c2a49aafddbeb00ca488827d54b150b50f7
diff --git a/zerver/lib/email_mirror.py b/zerver/lib/email_mirror.py --- a/zerver/lib/email_mirror.py +++ b/zerver/lib/email_mirror.py @@ -141,6 +141,8 @@ def construct_zulip_body(message: message.Message, realm: Realm, show_sender: bo if not include_footer: body = filter_footer(body) + if not body.endswith('\n'): + body += '\n' body += extract_and_upload_attachments(message, realm) body = body.strip() if not body: @@ -226,14 +228,9 @@ def filter_footer(text: str) -> str: def extract_and_upload_attachments(message: message.Message, realm: Realm) -> str: user_profile = get_system_bot(settings.EMAIL_GATEWAY_BOT) - attachment_links = [] - - payload = message.get_payload() - if not isinstance(payload, list): - # This is not a multipart message, so it can't contain attachments. - return "" - for part in payload: + attachment_links = [] + for part in message.walk(): content_type = part.get_content_type() filename = part.get_filename() if filename: @@ -249,7 +246,7 @@ def extract_and_upload_attachments(message: message.Message, realm: Realm) -> st logger.warning("Payload is not bytes (invalid attachment %s in message from %s)." % (filename, message.get("From"))) - return "\n".join(attachment_links) + return '\n'.join(attachment_links) def decode_stream_email_address(email: str) -> Tuple[Stream, Dict[str, bool]]: token, options = decode_email_address(email) diff --git a/zerver/management/commands/send_to_email_mirror.py b/zerver/management/commands/send_to_email_mirror.py --- a/zerver/management/commands/send_to_email_mirror.py +++ b/zerver/management/commands/send_to_email_mirror.py @@ -105,12 +105,15 @@ def _parse_email_fixture(self, fixture_path: str) -> Message: def _prepare_message(self, message: Message, realm: Realm, stream_name: str) -> None: stream = get_stream(stream_name, realm) + # The block below ensures that the imported email message doesn't have any recipient-like + # headers that are inconsistent with the recipient we want (the stream address). recipient_headers = ["X-Gm-Original-To", "Delivered-To", - "Resent-To", "Resent-CC", "To", "CC"] + "Resent-To", "Resent-CC", "CC"] for header in recipient_headers: if header in message: del message[header] message[header] = encode_email_address(stream) - return + if 'To' in message: + del message['To'] message['To'] = encode_email_address(stream)
diff --git a/zerver/tests/test_email_mirror.py b/zerver/tests/test_email_mirror.py --- a/zerver/tests/test_email_mirror.py +++ b/zerver/tests/test_email_mirror.py @@ -422,7 +422,45 @@ def test_message_with_valid_attachment(self) -> None: target_realm=user_profile.realm) message = most_recent_message(user_profile) - self.assertEqual(message.content, "Test body[image.png](https://test_url)") + self.assertEqual(message.content, "Test body\n[image.png](https://test_url)") + + def test_message_with_valid_nested_attachment(self) -> None: + user_profile = self.example_user('hamlet') + self.login(user_profile.email) + self.subscribe(user_profile, "Denmark") + stream = get_stream("Denmark", user_profile.realm) + stream_to_address = encode_email_address(stream) + + incoming_valid_message = MIMEMultipart() + text_msg = MIMEText("Test body") + incoming_valid_message.attach(text_msg) + + nested_multipart = MIMEMultipart() + nested_text_message = MIMEText("Nested text that should get skipped.") + nested_multipart.attach(nested_text_message) + with open(os.path.join(settings.DEPLOY_ROOT, "static/images/default-avatar.png"), 'rb') as f: + image_bytes = f.read() + + attachment_msg = MIMEImage(image_bytes) + attachment_msg.add_header('Content-Disposition', 'attachment', filename="image.png") + nested_multipart.attach(attachment_msg) + incoming_valid_message.attach(nested_multipart) + + incoming_valid_message['Subject'] = 'Subject' + incoming_valid_message['From'] = self.example_email('hamlet') + incoming_valid_message['To'] = stream_to_address + incoming_valid_message['Reply-to'] = self.example_email('othello') + + with mock.patch('zerver.lib.email_mirror.upload_message_file', + return_value='https://test_url') as upload_message_file: + process_message(incoming_valid_message) + upload_message_file.assert_called_with('image.png', len(image_bytes), + 'image/png', image_bytes, + get_system_bot(settings.EMAIL_GATEWAY_BOT), + target_realm=user_profile.realm) + + message = most_recent_message(user_profile) + self.assertEqual(message.content, "Test body\n[image.png](https://test_url)") def test_message_with_invalid_attachment(self) -> None: user_profile = self.example_user('hamlet')
Include attachments with emails ### Current and desired situation When I forwarded an email that contained a PDF attachment, I was surprised that Zulip did not include this PDF, so I had to manually download and upload the attachment to the thread. ### Proposed implementation Show emails with their attachments, in the same fashion that attaching a file to a message currently works.
I had thought we did include attachments. @mateuszmandera can you investigate? Hello @zulip/server-development, @zulip/server-integrations members, this issue was labeled with the "area: emails", "area: integrations" labels, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim
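A standard-library illustration of why the fix above walks the message tree: a forwarded email is typically a multipart part nested inside the outer multipart container, so iterating only `message.get_payload()` misses its attachments, while `message.walk()` recurses into them (the `report.pdf` name and payload bytes below are made up for the example):
```python
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

outer = MIMEMultipart()
outer.attach(MIMEText("Forwarded message body"))

inner = MIMEMultipart()  # e.g. the forwarded email itself
inner.attach(MIMEText("Nested text part"))
pdf = MIMEApplication(b"%PDF-1.4 ...", _subtype="pdf")
pdf.add_header("Content-Disposition", "attachment", filename="report.pdf")
inner.attach(pdf)
outer.attach(inner)

# Top level only: the nested attachment is invisible.
print([part.get_filename() for part in outer.get_payload()])                  # [None, None]
# walk() recurses into nested multipart parts and finds it.
print([part.get_filename() for part in outer.walk() if part.get_filename()])  # ['report.pdf']
```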
2020-01-14T16:43:34
zulip/zulip
13,741
zulip__zulip-13741
[ "13560" ]
a3f08f01ec0113e5731c0a3c483a7ed54a4a1be8
diff --git a/zerver/views/auth.py b/zerver/views/auth.py --- a/zerver/views/auth.py +++ b/zerver/views/auth.py @@ -47,6 +47,7 @@ import logging from social_django.utils import load_backend, load_strategy +from social_django.views import auth as social_django_auth from two_factor.forms import BackupTokenForm from two_factor.views import LoginView as BaseTwoFactorLoginView @@ -451,6 +452,24 @@ def start_social_signup(request: HttpRequest, backend: str, extra_arg: Optional[ return oauth_redirect_to_root(request, backend_url, 'social', is_signup=True, extra_url_params=extra_url_params) +def social_auth(request: HttpRequest, backend: str) -> HttpResponse: + """ + python-social-auth sets certain fields from the request into the session + and doesn't clear them if another request is made with a field that was present + in the previous request now missing. We use this function to hook into the beginning + of the social auth flow to ensure the session is properly cleared out. + This function and the corresponding url entry in urls.py should be removed if this issue + gets fixed upstream - https://github.com/python-social-auth/social-core/issues/425 + """ + + for field_name in settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION: + try: + del request.session[field_name] + except KeyError: + pass + + return social_django_auth(request, backend) + def authenticate_remote_user(realm: Realm, email_address: Optional[str]) -> Optional[UserProfile]: if email_address is None: diff --git a/zproject/settings.py b/zproject/settings.py --- a/zproject/settings.py +++ b/zproject/settings.py @@ -224,6 +224,10 @@ def get_dirs(self) -> List[str]: # backends support the username not being unique; and they do. # See: https://docs.djangoproject.com/en/1.11/topics/auth/customizing/#django.contrib.auth.models.CustomUser.USERNAME_FIELD "auth.W004", + # urls.W003 warns against using colons in the name in url(..., name) because colons are used + # for namespaces. We need to override a url entry in the social: namespace, so we use + # the colon in this way intentionally. + "urls.W003", ] ######################################################################## diff --git a/zproject/urls.py b/zproject/urls.py --- a/zproject/urls.py +++ b/zproject/urls.py @@ -715,6 +715,12 @@ ] # Python Social Auth + +# This overrides the analogical entry in social_django.urls, because we want run our own code +# at the beginning of social auth process. If deleting this override in the future, +# it should be possible to remove urls.W003 from SILENCED_SYSTEM_CHECKS. +urls += [url(r'^login/(?P<backend>[^/]+)/$', zerver.views.auth.social_auth, name='social:begin')] + urls += [url(r'^', include('social_django.urls', namespace='social'))] urls += [url(r'^saml/metadata.xml$', zerver.views.auth.saml_sp_metadata)]
diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -786,6 +786,28 @@ def test_social_auth_desktop_success(self) -> None: self.assertEqual(result.status_code, 302) self.assert_logged_in_user_id(self.user_profile.id) + def test_social_auth_session_fields_cleared_correctly(self) -> None: + mobile_flow_otp = '1234abcd' * 8 + + def initiate_auth(mobile_flow_otp: Optional[str]=None) -> None: + url, headers = self.prepare_login_url_and_headers(subdomain='zulip', + mobile_flow_otp=mobile_flow_otp) + result = self.client_get(url, **headers) + self.assertEqual(result.status_code, 302) + + result = self.client_get(result.url, **headers) + self.assertEqual(result.status_code, 302) + + # Start social auth with mobile_flow_otp param. It should get saved into the session + # on SOCIAL_AUTH_SUBDOMAIN. + initiate_auth(mobile_flow_otp) + self.assertEqual(self.client.session['mobile_flow_otp'], mobile_flow_otp) + + # Make a request without mobile_flow_otp param and verify the field doesn't persist + # in the session from the previous request. + initiate_auth() + self.assertNotIn('mobile_flow_otp', self.client.session) + def test_social_auth_mobile_and_desktop_flow_in_one_request_error(self) -> None: otp = '1234abcd' * 8 account_data_dict = self.get_account_data_dict(email=self.email, name='Full Name')
After logging into mobile app with OAuth, same browser can’t log into webapp with OAuth The browser-based OAuth flow for the mobile app stuffs `mobile_flow_otp` into a session variable somewhere in Python Social Auth. If you then try to use the same browser to log into the webapp, the previous value of `mobile_flow_otp` leaks into the new authentication flow and you end up getting dumped back into the mobile app instead of logged into the webapp.
Hello @zulip/server-authentication members, this issue was labeled with the "area: authentication" label, so you may want to check it out! <!-- areaLabelAddition --> Annoying. @mateuszmandera FYI. Likely the fix is to just figure out how to clear the `python-social-auth` session when we start an oauth flow. I'm a bit surprised python-social-auth isn't doing this for us... :open_mouth: Thanks @andersk for pinning that down! I'm pretty sure I've run into that behavior before, and been annoyed by it. Facing this same problem while building the flow for desktop auth as well. Normal login in the browser (after app login ) also results in a prompt to open the app. @mateuszmandera can't we just make `oauth_redirect_to_root` (aka the starting point of social auth) clear the session at the beginning? Oh, hmm, I guess we need to clear session cookies on `auth.zulipchat.com` instead, which currently would be inside the actual start function for the social auth code (and thus harder to hack)?
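A hedged sketch of that idea, kept generic: before a new social-auth flow starts, drop whatever fields a previous flow (such as the mobile app's OAuth login) left in the session; the merged change above applies this at python-social-auth's `begin` URL:
```python
# Illustrative helper, not Zulip's actual API: clear stale python-social-auth
# session fields (e.g. mobile_flow_otp) so they cannot leak into the next flow.
from django.conf import settings
from django.http import HttpRequest

def clear_stale_social_auth_fields(request: HttpRequest) -> None:
    for field_name in getattr(settings, "SOCIAL_AUTH_FIELDS_STORED_IN_SESSION", []):
        request.session.pop(field_name, None)
```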
2020-01-27T11:11:43
zulip/zulip
13,748
zulip__zulip-13748
[ "13736", "13736" ]
a3f08f01ec0113e5731c0a3c483a7ed54a4a1be8
diff --git a/zerver/lib/server_initialization.py b/zerver/lib/server_initialization.py new file mode 100644 --- /dev/null +++ b/zerver/lib/server_initialization.py @@ -0,0 +1,38 @@ +from django.conf import settings + +from zerver.lib.actions import do_change_is_admin +from zerver.lib.bulk_create import bulk_create_users +from zerver.models import Realm, UserProfile, email_to_username, get_client, \ + get_system_bot + +from typing import Iterable, Optional, Tuple + +def create_internal_realm() -> None: + realm = Realm.objects.create(string_id=settings.SYSTEM_BOT_REALM) + + # Create the "website" and "API" clients: + get_client("website") + get_client("API") + + internal_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,)) + for bot in settings.INTERNAL_BOTS] + create_users(realm, internal_bots, bot_type=UserProfile.DEFAULT_BOT) + # Set the owners for these bots to the bots themselves + bots = UserProfile.objects.filter(email__in=[bot_info[1] for bot_info in internal_bots]) + for bot in bots: + bot.bot_owner = bot + bot.save() + + # Initialize the email gateway bot as an API Super User + email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT) + do_change_is_admin(email_gateway_bot, True, permission="api_super_user") + +def create_users(realm: Realm, name_list: Iterable[Tuple[str, str]], + tos_version: Optional[str]=None, + bot_type: Optional[int]=None, + bot_owner: Optional[UserProfile]=None) -> None: + user_set = set() + for full_name, email in name_list: + short_name = email_to_username(email) + user_set.add((email, full_name, short_name, True)) + bulk_create_users(realm, user_set, bot_type=bot_type, bot_owner=bot_owner, tos_version=tos_version) diff --git a/zerver/management/commands/initialize_voyager_db.py b/zerver/management/commands/initialize_voyager_db.py --- a/zerver/management/commands/initialize_voyager_db.py +++ b/zerver/management/commands/initialize_voyager_db.py @@ -1,23 +1,14 @@ from argparse import ArgumentParser -from typing import Any, Iterable, Optional, Tuple +from typing import Any from django.conf import settings from django.core.management.base import BaseCommand -from zerver.lib.actions import do_change_is_admin -from zerver.lib.bulk_create import bulk_create_users -from zerver.models import Realm, UserProfile, email_to_username, get_client, \ - get_system_bot +from zerver.lib.server_initialization import create_internal_realm +from zerver.models import Realm settings.TORNADO_SERVER = None -def create_users(realm: Realm, name_list: Iterable[Tuple[str, str]], bot_type: Optional[int]=None) -> None: - user_set = set() - for full_name, email in name_list: - short_name = email_to_username(email) - user_set.add((email, full_name, short_name, True)) - bulk_create_users(realm, user_set, bot_type) - class Command(BaseCommand): help = "Populate an initial database for Zulip Voyager" @@ -32,23 +23,7 @@ def handle(self, *args: Any, **options: Any) -> None: if Realm.objects.count() > 0: print("Database already initialized; doing nothing.") return - realm = Realm.objects.create(string_id=settings.SYSTEM_BOT_REALM) - - get_client("website") - get_client("API") - - internal_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,)) - for bot in settings.INTERNAL_BOTS] - create_users(realm, internal_bots, bot_type=UserProfile.DEFAULT_BOT) - # Set the owners for these bots to the bots themselves - bots = UserProfile.objects.filter(email__in=[bot_info[1] for bot_info in internal_bots]) - for bot in bots: - bot.bot_owner = bot - 
bot.save() - - # Initialize the email gateway bot as an API Super User - email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT) - do_change_is_admin(email_gateway_bot, True, permission="api_super_user") + create_internal_realm() self.stdout.write("Successfully populated database with initial data.\n") self.stdout.write("Please run ./manage.py generate_realm_creation_link " diff --git a/zilencer/management/commands/populate_db.py b/zilencer/management/commands/populate_db.py --- a/zilencer/management/commands/populate_db.py +++ b/zilencer/management/commands/populate_db.py @@ -2,7 +2,7 @@ import os import random from typing import Any, Callable, Dict, Iterable, List, \ - Mapping, Optional, Sequence, Set, Tuple + Mapping, Sequence, Tuple import ujson from datetime import datetime @@ -18,11 +18,12 @@ from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, check_add_realm_emoji, \ do_change_is_admin, do_send_messages, do_update_user_custom_profile_data_if_changed, \ try_add_realm_custom_profile_field, try_add_realm_default_custom_profile_field -from zerver.lib.bulk_create import bulk_create_streams, bulk_create_users +from zerver.lib.bulk_create import bulk_create_streams from zerver.lib.cache import cache_set from zerver.lib.generate_test_data import create_test_data from zerver.lib.onboarding import create_if_missing_realm_internal_bots from zerver.lib.push_notifications import logger as push_notifications_logger +from zerver.lib.server_initialization import create_internal_realm, create_users from zerver.lib.storage import static_path from zerver.lib.users import add_service from zerver.lib.url_preview.preview import CACHE_NAME as PREVIEW_CACHE_NAME @@ -31,8 +32,8 @@ from zerver.models import CustomProfileField, DefaultStream, Message, Realm, RealmAuditLog, \ RealmDomain, Recipient, Service, Stream, Subscription, \ UserMessage, UserPresence, UserProfile, Huddle, Client, \ - email_to_username, get_client, get_huddle, get_realm, get_stream, \ - get_system_bot, get_user, get_user_profile_by_id + get_client, get_huddle, get_realm, get_stream, \ + get_user, get_user_profile_by_id from zerver.lib.types import ProfileFieldData from scripts.lib.zulip_tools import get_or_create_dev_uuid_var_path @@ -73,16 +74,6 @@ def clear_database() -> None: # Suppress spammy output from the push notifications logger push_notifications_logger.disabled = True -def create_users(realm: Realm, name_list: Iterable[Tuple[str, str]], - bot_type: Optional[int]=None, - bot_owner: Optional[UserProfile]=None) -> None: - user_set = set() # type: Set[Tuple[str, str, str, bool]] - for full_name, email in name_list: - short_name = email_to_username(email) - user_set.add((email, full_name, short_name, True)) - tos_version = settings.TOS_VERSION if bot_type is None else None - bulk_create_users(realm, user_set, bot_type=bot_type, bot_owner=bot_owner, tos_version=tos_version) - def subscribe_users_to_streams(realm: Realm, stream_dict: Dict[str, Dict[str, Any]]) -> None: subscriptions_to_add = [] event_time = timezone_now() @@ -271,7 +262,7 @@ def handle(self, **options: Any) -> None: email = fname.lower() + '@zulip.com' names.append((full_name, email)) - create_users(zulip_realm, names) + create_users(zulip_realm, names, tos_version=settings.TOS_VERSION) iago = get_user("[email protected]", zulip_realm) do_change_is_admin(iago, True) @@ -511,12 +502,6 @@ def handle(self, **options: Any) -> None: generate_and_send_messages(job) if options["delete"]: - # Create the "website" and "API" clients; if we don't, the - # default 
values in zerver/decorators.py will not work - # with the Django test suite. - get_client("website") - get_client("API") - if options["test_suite"]: # Create test users; the MIT ones are needed to test # the Zephyr mirroring codepaths. @@ -525,13 +510,13 @@ def handle(self, **options: Any) -> None: ("Athena Consulting Exchange User (MIT)", "[email protected]"), ("Esp Classroom (MIT)", "[email protected]"), ] - create_users(mit_realm, testsuite_mit_users) + create_users(mit_realm, testsuite_mit_users, tos_version=settings.TOS_VERSION) testsuite_lear_users = [ ("King Lear", "[email protected]"), ("Cordelia Lear", "[email protected]"), ] - create_users(lear_realm, testsuite_lear_users) + create_users(lear_realm, testsuite_lear_users, tos_version=settings.TOS_VERSION) if not options["test_suite"]: # To keep the messages.json fixtures file for the test @@ -608,17 +593,6 @@ def handle(self, **options: Any) -> None: call_command('populate_analytics_db') self.stdout.write("Successfully populated test database.\n") -def create_internal_realm() -> None: - internal_realm = Realm.objects.create(string_id=settings.SYSTEM_BOT_REALM) - - internal_realm_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,)) - for bot in settings.INTERNAL_BOTS] - create_users(internal_realm, internal_realm_bots, bot_type=UserProfile.DEFAULT_BOT) - - # Initialize the email gateway bot as an API Super User - email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT) - do_change_is_admin(email_gateway_bot, True, permission="api_super_user") - recipient_hash = {} # type: Dict[int, Recipient] def get_recipient_by_id(rid: int) -> Recipient: if rid in recipient_hash:
diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py --- a/zerver/tests/test_subs.py +++ b/zerver/tests/test_subs.py @@ -3183,7 +3183,7 @@ def test_subscriptions_query_count(self) -> None: dict(principals=ujson.dumps([user1.email, user2.email])), invite_only=True, ) - self.assert_length(queries, 38) + self.assert_length(queries, 40) # Test creating a public stream with announce when realm has a notification stream. notifications_stream = get_stream(self.streams[0], self.test_realm)
Deduplicate `create_internal_realm` between populate_db and initialize_voyager_db Once https://github.com/zulip/zulip/pull/13735 is merged, I believe that we can now do a commit series like this: * Move `create_internal_realm` and `create_users` from `populate_db` to some reasonable place, like `zerver/lib/server_initialization.py` * Move the `get_client("website")` common setup code block into `create_internal_realm` * Change `tos_version` to be an option to `create_users` * Port in the `bot_owner` logic in `initialize_voyager_db` into `create_internal_realm` * Verify that `initialize_voyager_db` has code identical to what's in `server_initialization.py`. * Delete the duplicate code in `initialize_voyager_db`. Once we've done this, I'd like to add a function in `zerver/lib/server_initialization.py` for checking if the server has been initialized (it can use the existing `Realm` check); we'll use this in `initialize_voyager_db` and then will be able to take advantage of it for checking whether `docker-zulip` needs to run initialization code. @mateuszmandera this is follow-up to work you did on cleaning up the system bot realm some number of months ago; maybe you should take this issue?
Hello @zulip/server-bots, @zulip/server-production, @zulip/server-refactoring members, this issue was labeled with the "area: bots", "area: production installer", "area: refactoring" labels, so you may want to check it out! <!-- areaLabelAddition --> OK, #13735 is merged and this is now ready to work on.
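A hedged sketch of the follow-up helper described in the issue — a check in `zerver/lib/server_initialization.py` for whether the server has been initialized, reusing the existing "any `Realm` exists" test that `initialize_voyager_db` already performs:
```python
# Hypothetical helper, not part of the patch above.
from zerver.models import Realm

def server_initialized() -> bool:
    return Realm.objects.exists()
```
Both `initialize_voyager_db` and the `docker-zulip` entrypoint could then call this instead of duplicating the `Realm.objects.count() > 0` check.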
2020-01-28T14:34:33
zulip/zulip
13,771
zulip__zulip-13771
[ "13762" ]
f8f57bdfcc294b5b52b2837a450702e6eccbb3b5
diff --git a/scripts/lib/clean_venv_cache.py b/scripts/lib/clean_venv_cache.py --- a/scripts/lib/clean_venv_cache.py +++ b/scripts/lib/clean_venv_cache.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 import argparse +import glob import os import sys @@ -40,8 +41,8 @@ def add_current_venv_cache(venv_name: str) -> None: # list its requirements subdirectory. if not os.path.exists(reqs_dir): continue - for filename in os.listdir(reqs_dir): - requirements_file = os.path.join(reqs_dir, filename) + requirements_files = glob.glob(os.path.join(reqs_dir, "*.txt")) + for requirements_file in requirements_files: deps = expand_reqs(requirements_file) hash_val = hash_deps(deps) caches_in_use.add(os.path.join(VENV_CACHE_DIR, hash_val))
Creation of temporary files in requirements/ can cause provision to fail An example to trigger this for me was as follows: * `cd requirements/` * edit a file using an editor which creates a temporary file in this location (e.g. vim, depending on configuration) * `tools/provision` * provision fails with an error like ``` UnicodeDecodeError: 'utf-8' codec can't decode byte 0xcd in position 17: invalid continuation byte ``` This appears to be due to the venv management script not being able to handle the unexpected file produced by e.g. vim. This is not a major issue, but it is a bit of a strange issue to debug if you are not expecting it or are new, and it could potentially be easy to fix.
Can you provide the full traceback and the filename? We probably are globbing for `.txt` files in that directory as part of our logic to check if anything has changed in the configuration.
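A small illustration of why the eventual fix (patch above) switches from `os.listdir()` to a `glob` for `*.txt`: editor droppings such as Vim swap files are returned by `listdir()` and then get hashed as if they were requirements files, while the glob ignores them (the directory contents in the comments are hypothetical):
```python
import glob
import os

reqs_dir = "requirements"  # assume it holds dev.txt, prod.txt and a stray .dev.txt.swp
print(sorted(os.listdir(reqs_dir)))
# ['.dev.txt.swp', 'dev.txt', 'prod.txt']  <- the swap file gets treated as a requirements file
print(sorted(glob.glob(os.path.join(reqs_dir, "*.txt"))))
# ['requirements/dev.txt', 'requirements/prod.txt']  <- editor temp files are skipped
```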
2020-01-30T01:45:42
zulip/zulip
13,773
zulip__zulip-13773
[ "13693" ]
c618f0770ec5f0ae2d0276226cc4c683ef790918
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -26,4 +26,4 @@ # historical commits sharing the same major version, in which case a # minor version bump suffices. -PROVISION_VERSION = '67.2' +PROVISION_VERSION = '67.3' diff --git a/zerver/lib/email_notifications.py b/zerver/lib/email_notifications.py --- a/zerver/lib/email_notifications.py +++ b/zerver/lib/email_notifications.py @@ -323,7 +323,6 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile, 'message_count': message_count, 'unsubscribe_link': unsubscribe_link, 'realm_name_in_notifications': user_profile.realm_name_in_notifications, - 'show_message_content': message_content_allowed_in_missedmessage_emails(user_profile) }) triggers = list(message['trigger'] for message in missed_messages) @@ -396,18 +395,23 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile, # If message content is disabled, then flush all information we pass to email. if not message_content_allowed_in_missedmessage_emails(user_profile): + realm = user_profile.realm context.update({ 'reply_to_zulip': False, 'messages': [], 'sender_str': "", - 'realm_str': user_profile.realm.name, + 'realm_str': realm.name, 'huddle_display_name': "", + 'show_message_content': False, + 'message_content_disabled_by_user': not user_profile.message_content_in_email_notifications, + 'message_content_disabled_by_realm': not realm.message_content_allowed_in_email_notifications, }) else: context.update({ 'messages': build_message_list(user_profile, list(m['message'] for m in missed_messages)), 'sender_str': ", ".join(sender.full_name for sender in senders), 'realm_str': user_profile.realm.name, + 'show_message_content': True, }) from_name = "Zulip missed messages" # type: str
diff --git a/zerver/tests/test_email_notifications.py b/zerver/tests/test_email_notifications.py --- a/zerver/tests/test_email_notifications.py +++ b/zerver/tests/test_email_notifications.py @@ -136,7 +136,7 @@ def normalize_string(self, s: str) -> str: def _get_tokens(self) -> List[str]: return ['mm' + str(random.getrandbits(32)) for _ in range(30)] - def _test_cases(self, msg_id: int, body: str, email_subject: str, + def _test_cases(self, msg_id: int, verify_body_include: List[str], email_subject: str, send_as_user: bool, verify_html_body: bool=False, show_message_content: bool=True, verify_body_does_not_include: Optional[List[str]]=None, @@ -161,9 +161,11 @@ def _test_cases(self, msg_id: int, body: str, email_subject: str, self.assertEqual(len(msg.reply_to), 1) self.assertIn(msg.reply_to[0], reply_to_emails) if verify_html_body: - self.assertIn(body, self.normalize_string(msg.alternatives[0][0])) + for text in verify_body_include: + self.assertIn(text, self.normalize_string(msg.alternatives[0][0])) else: - self.assertIn(body, self.normalize_string(msg.body)) + for text in verify_body_include: + self.assertIn(text, self.normalize_string(msg.body)) if verify_body_does_not_include is not None: for text in verify_body_does_not_include: self.assertNotIn(text, self.normalize_string(msg.body)) @@ -174,12 +176,12 @@ def _realm_name_in_missed_message_email_subject(self, realm_name_in_notification self.example_email('hamlet'), 'Extremely personal message!', ) - body = 'Extremely personal message!' + verify_body_include = ['Extremely personal message!'] email_subject = 'PMs with Othello, the Moor of Venice' if realm_name_in_notifications: email_subject = 'PMs with Othello, the Moor of Venice [Zulip Dev]' - self._test_cases(msg_id, body, email_subject, False) + self._test_cases(msg_id, verify_body_include, email_subject, False) def _extra_context_in_missed_stream_messages_mention(self, send_as_user: bool, show_message_content: bool=True) -> None: @@ -193,18 +195,26 @@ def _extra_context_in_missed_stream_messages_mention(self, send_as_user: bool, '@**King Hamlet**') if show_message_content: - body = ("Othello, the Moor of Venice: 1 2 3 4 5 6 7 8 9 10 @**King Hamlet** -- " - "You are receiving this because you were mentioned in Zulip Dev.") + verify_body_include = [ + "Othello, the Moor of Venice: 1 2 3 4 5 6 7 8 9 10 @**King Hamlet** -- ", + "You are receiving this because you were mentioned in Zulip Dev." + ] email_subject = '#Denmark > test' verify_body_does_not_include = [] # type: List[str] else: # Test in case if message content in missed email message are disabled. 
- body = 'Manage email preferences: http://zulip.testserver/#settings/notifications' + verify_body_include = [ + "This email does not include message content because you have disabled message ", + "http://zulip.testserver/help/pm-mention-alert-notifications ", + "View or reply in Zulip", + " Manage email preferences: http://zulip.testserver/#settings/notifications" + ] + email_subject = 'New missed messages' verify_body_does_not_include = ['Denmark > test', 'Othello, the Moor of Venice', '1 2 3 4 5 6 7 8 9 10 @**King Hamlet**', 'private', 'group', 'Reply to this email directly, or view it in Zulip'] - self._test_cases(msg_id, body, email_subject, send_as_user, + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user, show_message_content=show_message_content, verify_body_does_not_include=verify_body_does_not_include, trigger='mentioned') @@ -221,18 +231,25 @@ def _extra_context_in_missed_stream_messages_wildcard_mention(self, send_as_user '@**all**') if show_message_content: - body = ("Othello, the Moor of Venice: 1 2 3 4 5 @**all** -- " - "You are receiving this because you were mentioned in Zulip Dev.") + verify_body_include = [ + "Othello, the Moor of Venice: 1 2 3 4 5 @**all** -- ", + "You are receiving this because you were mentioned in Zulip Dev." + ] email_subject = '#Denmark > test' verify_body_does_not_include = [] # type: List[str] else: # Test in case if message content in missed email message are disabled. - body = 'Manage email preferences: http://zulip.testserver/#settings/notifications' + verify_body_include = [ + "This email does not include message content because you have disabled message ", + "http://zulip.testserver/help/pm-mention-alert-notifications ", + "View or reply in Zulip", + " Manage email preferences: http://zulip.testserver/#settings/notifications" + ] email_subject = 'New missed messages' verify_body_does_not_include = ['Denmark > test', 'Othello, the Moor of Venice', '1 2 3 4 5 @**all**', 'private', 'group', 'Reply to this email directly, or view it in Zulip'] - self._test_cases(msg_id, body, email_subject, send_as_user, + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user, show_message_content=show_message_content, verify_body_does_not_include=verify_body_does_not_include, trigger='wildcard_mentioned') @@ -246,10 +263,12 @@ def _extra_context_in_missed_stream_messages_email_notify(self, send_as_user: bo msg_id = self.send_stream_message( self.example_email('othello'), "denmark", '12') - body = ("Othello, the Moor of Venice: 1 2 3 4 5 6 7 8 9 10 12 -- " - "You are receiving this because you have email notifications enabled for this stream.") + verify_body_include = [ + "Othello, the Moor of Venice: 1 2 3 4 5 6 7 8 9 10 12 -- ", + "You are receiving this because you have email notifications enabled for this stream." 
+ ] email_subject = '#Denmark > test' - self._test_cases(msg_id, body, email_subject, send_as_user, trigger='stream_email_notify') + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user, trigger='stream_email_notify') def _extra_context_in_missed_stream_messages_mention_two_senders(self, send_as_user: bool) -> None: for i in range(0, 3): @@ -257,13 +276,17 @@ def _extra_context_in_missed_stream_messages_mention_two_senders(self, send_as_u msg_id = self.send_stream_message( self.example_email('othello'), "Denmark", '@**King Hamlet**') - body = ("Cordelia Lear: 0 1 2 Othello, the Moor of Venice: @**King Hamlet** -- " - "You are receiving this because you were mentioned in Zulip Dev.") + verify_body_include = [ + "Cordelia Lear: 0 1 2 Othello, the Moor of Venice: @**King Hamlet** -- ", + "You are receiving this because you were mentioned in Zulip Dev." + ] email_subject = '#Denmark > test' - self._test_cases(msg_id, body, email_subject, send_as_user, trigger='mentioned') + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user, trigger='mentioned') def _extra_context_in_personal_missed_stream_messages(self, send_as_user: bool, - show_message_content: bool=True) -> None: + show_message_content: bool=True, + message_content_disabled_by_user: bool=False, + message_content_disabled_by_realm: bool=False) -> None: msg_id = self.send_personal_message( self.example_email('othello'), self.example_email('hamlet'), @@ -271,15 +294,28 @@ def _extra_context_in_personal_missed_stream_messages(self, send_as_user: bool, ) if show_message_content: - body = 'Extremely personal message!' + verify_body_include = ['Extremely personal message!'] email_subject = 'PMs with Othello, the Moor of Venice' verify_body_does_not_include = [] # type: List[str] else: - body = 'Manage email preferences: http://zulip.testserver/#settings/notifications' + if message_content_disabled_by_realm: + verify_body_include = [ + "This email does not include message content because your organization has disabled", + "http://zulip.testserver/help/hide-message-content-in-emails", + "View or reply in Zulip", + " Manage email preferences: http://zulip.testserver/#settings/notifications" + ] + elif message_content_disabled_by_user: + verify_body_include = [ + "This email does not include message content because you have disabled message ", + "http://zulip.testserver/help/pm-mention-alert-notifications ", + "View or reply in Zulip", + " Manage email preferences: http://zulip.testserver/#settings/notifications" + ] email_subject = 'New missed messages' verify_body_does_not_include = ['Othello, the Moor of Venice', 'Extremely personal message!', 'mentioned', 'group', 'Reply to this email directly, or view it in Zulip'] - self._test_cases(msg_id, body, email_subject, send_as_user, + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user, show_message_content=show_message_content, verify_body_does_not_include=verify_body_does_not_include) @@ -289,9 +325,9 @@ def _reply_to_email_in_personal_missed_stream_messages(self, send_as_user: bool) self.example_email('hamlet'), 'Extremely personal message!', ) - body = 'Reply to this email directly, or view it in Zulip' + verify_body_include = ['Reply to this email directly, or view it in Zulip'] email_subject = 'PMs with Othello, the Moor of Venice' - self._test_cases(msg_id, body, email_subject, send_as_user) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user) def _reply_warning_in_personal_missed_stream_messages(self, 
send_as_user: bool) -> None: msg_id = self.send_personal_message( @@ -299,9 +335,9 @@ def _reply_warning_in_personal_missed_stream_messages(self, send_as_user: bool) self.example_email('hamlet'), 'Extremely personal message!', ) - body = 'Do not reply to this message.' + verify_body_include = ['Do not reply to this email.'] email_subject = 'PMs with Othello, the Moor of Venice' - self._test_cases(msg_id, body, email_subject, send_as_user) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user) def _extra_context_in_huddle_missed_stream_messages_two_others(self, send_as_user: bool, show_message_content: bool=True) -> None: @@ -315,16 +351,21 @@ def _extra_context_in_huddle_missed_stream_messages_two_others(self, send_as_use ) if show_message_content: - body = 'Othello, the Moor of Venice: Group personal message! -- Reply' + verify_body_include = ['Othello, the Moor of Venice: Group personal message! -- Reply'] email_subject = 'Group PMs with Iago and Othello, the Moor of Venice' verify_body_does_not_include = [] # type: List[str] else: - body = 'Manage email preferences: http://zulip.testserver/#settings/notifications' + verify_body_include = [ + "This email does not include message content because you have disabled message ", + "http://zulip.testserver/help/pm-mention-alert-notifications ", + "View or reply in Zulip", + " Manage email preferences: http://zulip.testserver/#settings/notifications" + ] email_subject = 'New missed messages' verify_body_does_not_include = ['Iago', 'Othello, the Moor of Venice Othello, the Moor of Venice', 'Group personal message!', 'mentioned', 'Reply to this email directly, or view it in Zulip'] - self._test_cases(msg_id, body, email_subject, send_as_user, + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user, show_message_content=show_message_content, verify_body_does_not_include=verify_body_does_not_include) @@ -339,9 +380,9 @@ def _extra_context_in_huddle_missed_stream_messages_three_others(self, send_as_u 'Group personal message!', ) - body = 'Othello, the Moor of Venice: Group personal message! -- Reply' + verify_body_include = ['Othello, the Moor of Venice: Group personal message! -- Reply'] email_subject = 'Group PMs with Cordelia Lear, Iago, and Othello, the Moor of Venice' - self._test_cases(msg_id, body, email_subject, send_as_user) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user) def _extra_context_in_huddle_missed_stream_messages_many_others(self, send_as_user: bool) -> None: msg_id = self.send_huddle_message(self.example_email('othello'), @@ -351,9 +392,9 @@ def _extra_context_in_huddle_missed_stream_messages_many_others(self, send_as_us self.example_email('prospero')], 'Group personal message!') - body = 'Othello, the Moor of Venice: Group personal message! -- Reply' + verify_body_include = ['Othello, the Moor of Venice: Group personal message! 
-- Reply'] email_subject = 'Group PMs with Cordelia Lear, Iago, and 2 others' - self._test_cases(msg_id, body, email_subject, send_as_user) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user) def _deleted_message_in_missed_stream_messages(self, send_as_user: bool) -> None: msg_id = self.send_stream_message( @@ -426,7 +467,8 @@ def test_message_content_disabled_in_missed_message_notifications(self) -> None: mail.outbox = [] self._extra_context_in_missed_stream_messages_wildcard_mention(False, show_message_content=False) mail.outbox = [] - self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False) + self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False, + message_content_disabled_by_user=True) mail.outbox = [] self._extra_context_in_huddle_missed_stream_messages_two_others(False, show_message_content=False) @@ -530,7 +572,8 @@ def test_realm_message_content_allowed_in_email_notifications(self) -> None: # Emails don't have missed message content when message content is disabled by the user do_change_notification_settings(user, "message_content_in_email_notifications", False) mail.outbox = [] - self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False) + self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False, + message_content_disabled_by_user=True) # When message content is not allowed at realm level # Emails don't have missed message irrespective of message content setting of the user @@ -540,11 +583,14 @@ def test_realm_message_content_allowed_in_email_notifications(self) -> None: do_change_notification_settings(user, "message_content_in_email_notifications", True) mail.outbox = [] - self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False) + self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False, + message_content_disabled_by_realm=True) do_change_notification_settings(user, "message_content_in_email_notifications", False) mail.outbox = [] - self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False) + self._extra_context_in_personal_missed_stream_messages(False, show_message_content=False, + message_content_disabled_by_user=True, + message_content_disabled_by_realm=True) def test_realm_emoji_in_missed_message(self) -> None: realm = get_realm("zulip") @@ -555,9 +601,9 @@ def test_realm_emoji_in_missed_message(self) -> None: realm_emoji_id = realm.get_active_emoji()['green_tick']['id'] realm_emoji_url = "http://zulip.testserver/user_avatars/%s/emoji/images/%s.png" % ( realm.id, realm_emoji_id,) - body = '<img alt=":green_tick:" src="%s" title="green tick" style="height: 20px;">' % (realm_emoji_url,) + verify_body_include = ['<img alt=":green_tick:" src="%s" title="green tick" style="height: 20px;">' % (realm_emoji_url,)] email_subject = 'PMs with Othello, the Moor of Venice' - self._test_cases(msg_id, body, email_subject, send_as_user=False, verify_html_body=True) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user=False, verify_html_body=True) def test_emojiset_in_missed_message(self) -> None: hamlet = self.example_user('hamlet') @@ -566,9 +612,9 @@ def test_emojiset_in_missed_message(self) -> None: msg_id = self.send_personal_message( self.example_email('othello'), self.example_email('hamlet'), 'Extremely personal message with a hamburger :hamburger:!') - body = '<img alt=":hamburger:" 
src="http://zulip.testserver/static/generated/emoji/images-twitter-64/1f354.png" title="hamburger" style="height: 20px;">' + verify_body_include = ['<img alt=":hamburger:" src="http://zulip.testserver/static/generated/emoji/images-twitter-64/1f354.png" title="hamburger" style="height: 20px;">'] email_subject = 'PMs with Othello, the Moor of Venice' - self._test_cases(msg_id, body, email_subject, send_as_user=False, verify_html_body=True) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user=False, verify_html_body=True) def test_stream_link_in_missed_message(self) -> None: msg_id = self.send_personal_message( @@ -576,9 +622,9 @@ def test_stream_link_in_missed_message(self) -> None: 'Come and join us in #**Verona**.') stream_id = get_stream('Verona', get_realm('zulip')).id href = "http://zulip.testserver/#narrow/stream/{stream_id}-Verona".format(stream_id=stream_id) - body = '<a class="stream" data-stream-id="5" href="{href}">#Verona</a'.format(href=href) + verify_body_include = ['<a class="stream" data-stream-id="5" href="{href}">#Verona</a'.format(href=href)] email_subject = 'PMs with Othello, the Moor of Venice' - self._test_cases(msg_id, body, email_subject, send_as_user=False, verify_html_body=True) + self._test_cases(msg_id, verify_body_include, email_subject, send_as_user=False, verify_html_body=True) def test_sender_name_in_missed_message(self) -> None: hamlet = self.example_user('hamlet')
Fix emails when disabling "Include message content in missed message notifications" I got a report from a user in the Recurse Center who was confused as to why they were getting missed-message emails that didn't contain any content. The emails also incorrectly claimed one couldn't reply because missed-message emails weren't configured on the server. There are a few things we should do to fix this (mostly centered on this block of code): ``` if not message_content_allowed_in_missedmessage_emails(user_profile): context.update({ 'reply_to_zulip': False, 'messages': [], 'sender_str': "", 'realm_str': user_profile.realm.name, 'huddle_display_name': "", ``` * [x] In this case, rather than the email displaying `Reply in Zulip\nDo not reply to this message. This Zulip server is not configured to accept incoming emails`, we should say "Reply in Zulip\nThis email does not include message content because you have disabled [message content appearing in email notifications](https://zulipchat.com/help/pm-mention-alert-notifications)." * [x] We should potentially move that text from the footer to the message body, rather than having no body? * [x] We also should change s/Do not reply to this message/Do not reply to this email/ for better clarity. @hackerkid can you take charge of fixing this?
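A hedged sketch of the direction the fix takes (see the patch above): instead of blanking the context and falling back to the misleading "not configured to accept incoming emails" footer, record why the content was hidden so the template can show the right explanation:
```python
# Sketch mirroring the merged change; the field names come from the patch above.
def hidden_content_context(user_profile) -> dict:
    realm = user_profile.realm
    return {
        'show_message_content': False,
        'message_content_disabled_by_user':
            not user_profile.message_content_in_email_notifications,
        'message_content_disabled_by_realm':
            not realm.message_content_allowed_in_email_notifications,
    }
```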
Hello @zulip/server-development members, this issue was labeled with the "area: emails" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim Hello @hackerkid, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 10 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 4 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! <!-- inactiveWarning -->
2020-01-30T13:11:30
zulip/zulip
13,789
zulip__zulip-13789
[ "13295" ]
65fe1a9eedc15b2e239447f03e80cfc604d5facf
diff --git a/zerver/webhooks/ansibletower/view.py b/zerver/webhooks/ansibletower/view.py --- a/zerver/webhooks/ansibletower/view.py +++ b/zerver/webhooks/ansibletower/view.py @@ -30,8 +30,19 @@ def api_ansibletower_webhook(request: HttpRequest, user_profile: UserProfile, check_send_webhook_message(request, user_profile, subject, body) return json_success() +def extract_friendly_name(payload: Dict[str, Any]) -> str: + tentative_job_name = payload.get("friendly_name", "") + if not tentative_job_name: + url = payload["url"] + segments = url.split("/") + tentative_job_name = segments[-3] + if tentative_job_name == "jobs": + tentative_job_name = "Job" + return tentative_job_name + def get_body(payload: Dict[str, Any]) -> str: - if (payload['friendly_name'] == 'Job'): + friendly_name = extract_friendly_name(payload) + if (friendly_name == 'Job'): hosts_list_data = payload['hosts'] hosts_data = [] for host in payload['hosts']: @@ -51,7 +62,7 @@ def get_body(payload: Dict[str, Any]) -> str: return ANSIBLETOWER_JOB_MESSAGE_TEMPLATE.format( name=payload['name'], - friendly_name=payload['friendly_name'], + friendly_name=friendly_name, id=payload['id'], url=payload['url'], status=status, @@ -67,7 +78,7 @@ def get_body(payload: Dict[str, Any]) -> str: data = { "name": payload['name'], - "friendly_name": payload['friendly_name'], + "friendly_name": friendly_name, "id": payload['id'], "url": payload['url'], "status": status
diff --git a/zerver/webhooks/ansibletower/tests.py b/zerver/webhooks/ansibletower/tests.py --- a/zerver/webhooks/ansibletower/tests.py +++ b/zerver/webhooks/ansibletower/tests.py @@ -55,6 +55,19 @@ def test_ansibletower_job_successful_message(self) -> None: self.send_and_test_stream_message('job_successful', expected_topic, expected_message) + def test_ansibletower_nine_job_successful_message(self) -> None: + """ + Test to see if awx/ansibletower 9.x.x job successful notifications are + handled just as successfully as prior to 9.x.x. + """ + expected_topic = "Demo Job Template" + expected_message = """ +Job: [#1 Demo Job Template](https://towerhost/#/jobs/playbook/1) was successful: +* localhost: Success +""".strip() + + self.send_and_test_stream_message('job_complete_successful_awx_9.1.1', expected_topic, expected_message) + def test_ansibletower_job_failed_message(self) -> None: """ Tests if ansibletower job failed notification is handled correctly
Error with AWX 7.0.0 Hello. I'm testing the integration between **AWX 7.0.0** (Ansible Tower) and **Zulip 2.0.4** by sending notifications. During testing, I encounter an error from Ansible: ![image](https://user-images.githubusercontent.com/2016950/66895321-c5baf980-eff2-11e9-8649-06e5f2433133.png) And I immediately receive an email warning from Zulip with the following content: ```Logger root, from module zerver.middleware line 291: Error generated by Ansible (user42@zulip.******.**) on zulip.******.** deployment Traceback (most recent call last): File "/srv/zulip-venv-cache/ebe617662f96425113e5a75344bbe5a0593f634a/zulip-py3-venv/lib/python3.7/site-packages/django/core/handlers/base.py", line 185, in _get_response response = wrapped_callback(request, *callback_args, **callback_kwargs) File "/srv/zulip-venv-cache/ebe617662f96425113e5a75344bbe5a0593f634a/zulip-py3-venv/lib/python3.7/site-packages/django/views/decorators/csrf.py", line 58, in wrapped_view return view_func(*args, **kwargs) File "./zerver/lib/request.py", line 289, in _wrapped_view_func return view_func(request, *args, **kwargs) File "./zerver/decorator.py", line 375, in _wrapped_func_arguments raise err File "./zerver/decorator.py", line 361, in _wrapped_func_arguments return view_func(request, user_profile, *args, **kwargs) File "./zerver/lib/request.py", line 289, in _wrapped_view_func return view_func(request, *args, **kwargs) File "./zerver/webhooks/ansibletower/view.py", line 27, in api_ansibletower_webhook body = get_body(payload) File "./zerver/webhooks/ansibletower/view.py", line 34, in get_body if (payload['friendly_name'] == 'Job'): KeyError: 'friendly_name' Deployed code: - git: 2.0.0-2546-ga1fa0b011 - ZULIP_VERSION: 2.0.4+git Request info: - path: /api/v1/external/ansibletower - POST: {} - REMOTE_ADDR: "['10.10.36.6']" - QUERY_STRING: "['api_key=******&topic=******&stream=******&topic=******" - SERVER_NAME: "['']" ``` I have already disabled the "Disable SSL checking", but it also seems that the new version of AWX (7.0.0) contains new options for webhooks like "HTTP Headers" and "HTTP Method". ![image](https://user-images.githubusercontent.com/2016950/66895546-5db8e300-eff3-11e9-9954-8e1df5485e17.png) Note that I already receive notifications from GitLab, so the notification service works in my self-hosted Zulip configuration.
Hello @zulip/server-integrations members, this issue was labeled with the "area: integrations" label, so you may want to check it out! <!-- areaLabelAddition --> @Hypro999 do you have time to look into this? It may be that our Ansible Tower integration needs to be updated for upstream API changes. @zulipbot claim @zulipbot abandon @sulian if it's okay with you, could you please share a (redacted) version of the payload that you received that caused this? If you're not sure of how, here's how you can do this in 5 easy steps: 1. Go to https://webhook.site 2. The site will give you a target URL. Something like `https://webhook.site/fec9a205-ecf8-422e-97a2-d42d16048e1d` - copy this to your clipboard. 3. Then go to your AWX installation and replace the URL field with the target URL that https://webhook.site gave you (paste it here). 4. Trigger the webhook event. 5. Now if you go back to webhook.site it will show you the payload that it just received. If you could just redact that (remove personal info by replacing with `*`s for example) and post the results here, I would greatly appreciate it since setting up a fresh AWX installation seems like it'll be **really** tedious (a worst case measure if at all avoidable). If you'd like to refrain from doing this, then that's totally fine and understandable - so the decision is up to you. Also, sorry for the really late response, I remember that I planned on doing this a little later after seeing the @-mention, but then it slipped my mind before I could get around to it. @danielepesciallo that's great! Can you trigger a payload for events which involve a "job" so that we can see if the keys have been renamed/changed? Thanks for the help! 🙂 Finding change logs for the AWX project was quite a bit of hassle, but after a fair amount of digging around, I was able to find: https://groups.google.com/forum/#!searchin/awx-project/7.0.0%7Csort:date/awx-project/lemzchJocjk/HAOXI0DnAAAJ Posting here for future reference. I'll try to see what progress I can make on this issue over the course of this week (or whatever's left of it) - it may take a few days though. I may need to dig around a bit more and perhaps skim through their source code to better understand what's going on on their end and how they're sending webhook payloads since they sadly don't have any documentation for this apart from https://docs.ansible.com/ansible-tower/latest/html/userguide/notifications.html#webhook (which makes this a lot harder). I also need to determine exactly when they made this change so that we can have this integration maintain backwards compatibility. Ok, so today I took the time to set up my own installation of Ansible + AWX so that I can take a closer look at what's going on and to test things out. Here are the installation details: 1. OS: Ubuntu 18.04.3 LTS x86_64 2. Ansible: ansible 2.5.1 (using python 2.7.17 by default) 3. AWX: awx 7.0.0. (commit: 4edfe7e5fc8b31b71e53091cb9271210fbee641b) Then I ran the demo job template and I found that I was unable to reproduce the error condition that @sulian faced. 
Here's what I got: When running the job: ```json { "id": 5, "name": "Demo Job Template", "url": "https://towerhost/#/jobs/playbook/5", "created_by": "admin", "started": "2020-01-23T08:37:51.227591+00:00", "finished": null, "status": "running", "traceback": "", "inventory": "Demo Inventory", "project": "Demo Project", "playbook": "hello_world.yml", "credential": "Demo Credential", "limit": "", "extra_vars": "{}", "hosts": {}, "friendly_name": "Job" } ``` After successful completion of the job: ```json { "id": 5, "name": "Demo Job Template", "url": "https://towerhost/#/jobs/playbook/5", "created_by": "admin", "started": "2020-01-23T08:37:51.227591+00:00", "finished": "2020-01-23T08:38:01.452808+00:00", "status": "successful", "traceback": "", "inventory": "Demo Inventory", "project": "Demo Project", "playbook": "hello_world.yml", "credential": "Demo Credential", "limit": "", "extra_vars": "{}", "hosts": { "localhost": { "failed": false, "changed": 0, "dark": 0, "failures": 0, "ok": 2, "processed": 1, "skipped": 0 } }, "friendly_name": "Job" } ``` So I'm not too sure of what's going on on your end. Now, I've reconfigured my server to use the latest versions of both Ansible and AWX since @danielepesciallo that's what you seem to be using. Here are the new installation details: OS: Ubuntu 18.04.3 LTS x86_64 Ansible: ansible 2.9.4 (using python 2.7.17 by default) AWX: awx 9.1.1 (commit: 4edfe7e5fc8b31b71e53091cb9271210fbee641b) And the good news here (depending on how you look at it I guess) is that I was able to reproduce the same data (and thus the error). When running the job: ```json { "id": 1, "name": "Demo Job Template", "url": "https://towerhost/#/jobs/playbook/1", "created_by": "admin", "started": "2020-01-23T09:15:45.741213+00:00", "finished": null, "status": "running", "traceback": "", "inventory": "Demo Inventory", "project": "Demo Project", "playbook": "hello_world.yml", "credential": "Demo Credential", "limit": "", "extra_vars": "{}", "hosts": {} } ``` After successful completion of the job: ```json { "id": 1, "name": "Demo Job Template", "url": "https://towerhost/#/jobs/playbook/1", "created_by": "admin", "started": "2020-01-23T09:15:45.741213+00:00", "finished": "2020-01-23T09:15:49.729882+00:00", "status": "successful", "traceback": "", "inventory": "Demo Inventory", "project": "Demo Project", "playbook": "hello_world.yml", "credential": "Demo Credential", "limit": "", "extra_vars": "{}", "hosts": { "localhost": { "failed": false, "changed": 0, "dark": 0, "failures": 0, "ok": 2, "processed": 1, "skipped": 0, "rescued": 0, "ignored": 0 } } } ``` Just an update: I've tried out a few more versions of AWX to see if I can determine where the change happened and this issue seems to have been introduced from AWX v9.0.0 onward. Though sadly the release notes at https://groups.google.com/forum/#!searchin/awx-project/AWX$209.0.0%7Csort:date/awx-project/YZqcSmspqL0/tiyQqZhbBAAJ don't seem to ever suggest this change. I think that the simplest and most efficient way to handle this would be to just use AWX's [custom notifications templates functionality](https://docs.ansible.com/ansible-tower/latest/html/userguide/notifications.html#create-a-notification-template) to re-introduce the missing key in the webhook payloads. Particularly using the following template: ```json { "id": "{{ job.id }}", "name": "{{ job.name }}", "friendly_name": "{{ job_friendly_name }}", "status": "{{ job.status }}", "url": "{{ url }}", "hosts": {} } ``` should do the trick. 
Of course, this template is the bare minimum; any extra information that the recipient is interested in can be added, but these are the bare minimum keys.

Note: I effectively eliminated the hosts key in this example because after an hour of poking around, I found that reconstructing the hosts field might not be possible because AWX currently does not support using `host_name` in their custom notification templates (either that or I didn't get what "hosts" exactly are) despite having `host_status_counts`. Sources:

1. https://docs.ansible.com/ansible-tower/latest/html/userguide/notifications.html#create-a-notification-template
2. https://docs.ansible.com/ansible-tower/3.6.2/html/installandreference/notification_parameters_supported.html#ir-notifications-reference
3. https://github.com/ansible/awx/blob/b7a064b05d9724ad88b6fce4ec7c0f776a7c51bb/awx/main/models/jobs.py#L692

Sample response from AWX:

```json
{
    "id": "13",
    "name": "Demo Job Template",
    "friendly_name": "Job",
    "status": "running",
    "url": "https://towerhost/#/jobs/playbook/13",
    "hosts": {}
}
```

@danielepesciallo can you give this a try?
Also, @timabbott, can I get your opinions on this based on the presented findings?

I think we might be able to just stop using the `friendly_name` field from the payload and instead just use the `name` element -- that is potentially better output in any case. And I think the conditionals we have in the code on `friendly_name` are mostly conditionals on whether a `hosts` element is there, plus some duplicated code. So we might be able to unify the templates as part of this.

> I think that the simplest and most efficient way to handle this would be to just use AWX's [custom notifications templates functionality](https://docs.ansible.com/ansible-tower/latest/html/userguide/notifications.html#create-a-notification-template) to re-introduce the missing key in the webhook payloads. [...]

Hi @Hypro999, I tried your trick and now I'm able to receive AWX notifications! Thanks a lot!! 😄

Daniele
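To make the thread's conclusion concrete, here is a minimal hedged sketch (not the actual Zulip webhook code) of the fallback Tim suggests: stop depending on `friendly_name` and fall back to `name` when AWX 9+ omits the key. The helper name and message format below are illustrative assumptions.

```python
from typing import Any, Dict

def get_topic_and_body(payload: Dict[str, Any]) -> Dict[str, str]:
    # AWX >= 9.0 no longer sends "friendly_name"; fall back to the job
    # template's "name" field (illustrative sketch only).
    label = payload.get("friendly_name") or payload.get("name", "Job")
    body = "{} {} is {}.".format(
        label, payload.get("id", "?"), payload.get("status", "unknown"))
    return {"topic": payload.get("name", "AWX notification"), "body": body}
```

Against the AWX 9.1.1 payload shown above, this would yield something like `Demo Job Template 1 is successful.` without requiring users to configure a custom notification template.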
2020-02-01T09:20:58
zulip/zulip
13,843
zulip__zulip-13843
[ "13588" ]
07602c4aacc6cac74a1df24977c7c10e73de2f64
diff --git a/zerver/lib/topic.py b/zerver/lib/topic.py --- a/zerver/lib/topic.py +++ b/zerver/lib/topic.py @@ -23,7 +23,7 @@ # Only use these constants for events. ORIG_TOPIC = "orig_subject" TOPIC_NAME = "subject" -TOPIC_LINKS = "subject_links" +TOPIC_LINKS = "topic_links" MATCH_TOPIC = "match_subject" # This constant is actually embedded into
diff --git a/frontend_tests/node_tests/echo.js b/frontend_tests/node_tests/echo.js --- a/frontend_tests/node_tests/echo.js +++ b/frontend_tests/node_tests/echo.js @@ -62,7 +62,7 @@ run_test('process_from_server for differently rendered messages', () => { timestamp: old_value, is_me_message: old_value, submessages: old_value, - subject_links: old_value, + topic_links: old_value, }, }; const server_messages = [ @@ -72,7 +72,7 @@ run_test('process_from_server for differently rendered messages', () => { timestamp: new_value, is_me_message: new_value, submessages: new_value, - subject_links: new_value, + topic_links: new_value, }, ]; echo._patch_waiting_for_awk(waiting_for_ack); @@ -86,7 +86,7 @@ run_test('process_from_server for differently rendered messages', () => { timestamp: new_value, is_me_message: new_value, submessages: new_value, - subject_links: new_value, + topic_links: new_value, }]); });
Rename `subject_links` to `topic_links` in our API This is an element of the broader `subject` -> `topic` migration (see #1192) that should be straightforward to change, because I believe the mobile apps don't access `subject_links` yet, so there's no compatibility work required. (What the data is used for in the webapp is the little in-topic-field links we show when there is a link or linkifier matching the topic line of the message). @gnprice to confirm I'm reading the mobile codebase correctly that it's indeed not accessed. Noticed in #13587; tagging as a priority since this sort of API migration gets more complex when delayed. We should be sure to look again at updating the docs as discussed in #13587 once this is complete.
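For client authors following this migration, a small hedged sketch (not taken from either codebase) of how a consumer can tolerate both key names while servers roll over:

```python
from typing import Any, Dict, List

def get_topic_links(message: Dict[str, Any]) -> List[Any]:
    # Prefer the renamed key; fall back to the pre-rename "subject_links"
    # for payloads from servers that have not upgraded yet.
    if "topic_links" in message:
        return message["topic_links"]
    return message.get("subject_links", [])
```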
Hello @zulip/server-api members, this issue was labeled with the "area: api", "area: documentation (api and integrations)" labels, so you may want to check it out! <!-- areaLabelAddition --> We don't usefully *use* that field, but it looks like we do access it. ... But I think the impact of it going missing would be limited to us sometimes recreating it in our internal data structures, with a value of `undefined`. Specifically, there's this in `src/message/messagesReducer.js`: ``` return { ...state, [action.message_id]: { ...oldMessage, content: action.rendered_content || oldMessage.content, subject: action.subject || oldMessage.subject, subject_links: action.subject_links || oldMessage.subject_links, ``` That's in the handler for `update_message` events. We're maintaining the `subject_links` property on our own copy of the message, starting from the value that came straight from the server's JSON for the message. (The line is pretty bogus in any event -- our type annotations say that the `subject_links` field on both message and event are either `string[]` or `$ReadOnlyArray<string>`, which if true would mean the value is always truthy and the `||` is meaningless.) So, it *should* work fine. What I'd ask is just: * When developing this change, please [point the mobile app](https://github.com/zulip/zulip-mobile/blob/master/docs/howto/dev-server.md#summary-checklist) at a dev server with the change, and test that it doesn't break. * No mobile dev environment needed; just the published release of the app. * In particular, test the following sequence: * Go look at some message. * From another device, edit the message. * Check that the edit comes through correctly, and there's no crash or error message. @zulipbot claim @zulipbot abandon @zulipbot claim Hello @hackerkid, you claimed this issue to work on it, but this issue and any referenced pull requests haven't been updated for 10 days. Are you still working on this issue? If so, please update this issue by leaving a comment on this issue to let me know that you're still working on it. Otherwise, I'll automatically remove you from this issue in 4 days. If you've decided to work on something else, simply comment `@zulipbot abandon` so that someone else can claim it and continue from where you left off. Thank you for your valuable contributions to Zulip! <!-- inactiveWarning -->
2020-02-07T12:35:59
zulip/zulip
13,849
zulip__zulip-13849
[ "13847" ]
27b15a9722144b779ffe597b189925f21d53612d
diff --git a/zerver/webhooks/hellosign/view.py b/zerver/webhooks/hellosign/view.py --- a/zerver/webhooks/hellosign/view.py +++ b/zerver/webhooks/hellosign/view.py @@ -7,6 +7,7 @@ from zerver.lib.response import json_success from zerver.lib.webhooks.common import check_send_webhook_message from zerver.models import UserProfile +import ujson IS_AWAITING_SIGNATURE = "is awaiting the signature of {awaiting_recipients}" WAS_JUST_SIGNED_BY = "was just signed by {signed_recipients}" @@ -54,8 +55,11 @@ def get_recipients_text(recipients: List[str]) -> str: @api_key_only_webhook_view('HelloSign') @has_request_variables def api_hellosign_webhook(request: HttpRequest, user_profile: UserProfile, - payload: Dict[str, Dict[str, Any]]=REQ(argument_type='body')) -> HttpResponse: - body = get_message_body(payload) - topic = payload['signature_request']['title'] - check_send_webhook_message(request, user_profile, topic, body) - return json_success() + payload: Dict[str, Dict[str, Any]]=REQ( + whence='json', converter=ujson.loads)) -> HttpResponse: + if "signature_request" in payload: + body = get_message_body(payload) + topic = payload['signature_request']['title'] + check_send_webhook_message(request, user_profile, topic, body) + + return json_success({"msg": "Hello API Event Received"})
diff --git a/zerver/webhooks/hellosign/tests.py b/zerver/webhooks/hellosign/tests.py --- a/zerver/webhooks/hellosign/tests.py +++ b/zerver/webhooks/hellosign/tests.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from zerver.lib.test_classes import WebhookTestCase - +from typing import Dict class HelloSignHookTests(WebhookTestCase): STREAM_NAME = 'hellosign' @@ -12,21 +12,21 @@ def test_signatures_message(self) -> None: expected_message = ("The `NDA with Acme Co.` document is awaiting the signature of " "Jack, and was just signed by Jill.") self.send_and_test_stream_message('signatures', expected_topic, expected_message, - content_type="application/x-www-form-urlencoded") + content_type=None) def test_signatures_message_signed_by_one(self) -> None: expected_topic = "NDA with Acme Co." expected_message = ("The `NDA with Acme Co.` document was just signed by Jill.") self.send_and_test_stream_message('signatures_signed_by_one_signatory', expected_topic, expected_message, - content_type="application/x-www-form-urlencoded") + content_type=None) def test_signatures_message_with_four_signatories(self) -> None: expected_topic = "Signature doc" expected_message = ("The `Signature doc` document is awaiting the signature of " "Eeshan Garg, John Smith, Jane Doe, and Stephen Strange.") self.send_and_test_stream_message('signatures_with_four_signatories', expected_topic, expected_message, - content_type="application/x-www-form-urlencoded") + content_type=None) def test_signatures_message_with_own_subject(self) -> None: expected_topic = "Our own subject." @@ -34,7 +34,7 @@ def test_signatures_message_with_own_subject(self) -> None: expected_message = ("The `NDA with Acme Co.` document is awaiting the signature of " "Jack, and was just signed by Jill.") self.send_and_test_stream_message('signatures_with_own_subject', expected_topic, expected_message, - content_type="application/x-www-form-urlencoded", topic=expected_topic) + content_type=None, topic=expected_topic) - def get_body(self, fixture_name: str) -> str: - return self.webhook_fixture_data("hellosign", fixture_name, file_type="json") + def get_body(self, fixture_name: str) -> Dict[str, str]: + return {"json": self.webhook_fixture_data("hellosign", fixture_name, file_type="json")}
Webhooks posting as form/multipart causes 500s Hellosign [posts their callback as form/multipart](https://app.hellosign.com/api/eventsAndCallbacksWalkthrough), which Django only permits to be read once. Attempts to access `request.body` after the initial read throw "django.http.request.RawPostDataException: You cannot access body after reading from request's data stream". You can trivially reproduce this locally, by provisioning a webhook API key, then invoking: curl -F json=foobar -XPOST \ http://localhost:9991/api/v1/external/hellosign?api_key=key Which results in: ``` 2020-02-07 22:57:28.177 ERR [] Traceback (most recent call last): File "/home/chris/projects/zulip/zerver/decorator.py", line 356, in _wrapped_func_arguments return view_func(request, user_profile, *args, **kwargs) File "/home/chris/projects/zulip/zerver/lib/request.py", line 306, in _wrapped_view_func request_body = request.body File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/http/request.py", line 265, in body raise RawPostDataException("You cannot access body after reading from request's data stream") django.http.request.RawPostDataException: You cannot access body after reading from request's data stream During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/handlers/base.py", line 185, in _get_response response = wrapped_callback(request, *callback_args, **callback_kwargs) File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/views/decorators/csrf.py", line 58, in wrapped_view return view_func(*args, **kwargs) File "/home/chris/projects/zulip/zerver/lib/request.py", line 372, in _wrapped_view_func return view_func(request, *args, **kwargs) File "/home/chris/projects/zulip/zerver/decorator.py", line 369, in _wrapped_func_arguments log_exception_to_webhook_logger(**kwargs) File "/home/chris/projects/zulip/zerver/decorator.py", line 285, in log_exception_to_webhook_logger payload = request.body File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/http/request.py", line 265, in body raise RawPostDataException("You cannot access body after reading from request's data stream") django.http.request.RawPostDataException: You cannot access body after reading from request's data stream ```
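For anyone reproducing this, here is a hedged Django-style sketch of the constraint and the safe pattern; the view name and response shape are made up, and the actual fix in the patch above achieves the same thing through `REQ(whence='json', ...)`:

```python
import json

from django.http import HttpRequest, JsonResponse

def hellosign_webhook(request: HttpRequest) -> JsonResponse:
    # HelloSign posts multipart/form-data with a single "json" form field.
    # Parsing request.POST consumes the input stream, so any later access
    # to request.body raises RawPostDataException; read the form field and
    # never touch the raw body afterwards.
    payload = json.loads(request.POST["json"])
    title = payload.get("signature_request", {}).get("title", "(untitled)")
    return JsonResponse({"msg": "received", "title": title})
```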
I was able to fixup the HelloSign webhook by changing the decorator arguments: ```diff diff --git a/zerver/webhooks/hellosign/view.py b/zerver/webhooks/hellosign/view.py index 7a89208e3..5a6ea6814 100644 --- a/zerver/webhooks/hellosign/view.py +++ b/zerver/webhooks/hellosign/view.py @@ -54,8 +54,10 @@ def get_recipients_text(recipients: List[str]) -> str: @api_key_only_webhook_view('HelloSign') @has_request_variables def api_hellosign_webhook(request: HttpRequest, user_profile: UserProfile, - payload: Dict[str, Dict[str, Any]]=REQ(argument_type='body')) -> HttpResponse: - body = get_message_body(payload) - topic = payload['signature_request']['title'] - check_send_webhook_message(request, user_profile, topic, body) + payloads: List[Dict[str, Dict[str, Any]]]=REQ(aliases=['json'])) -> HttpResponse: + handleable_payloads = [p for p in payloads if "signature_request" in p] + for payload in handleable_payloads: + body = get_message_body(payload) + topic = payload['signature_request']['title'] + check_send_webhook_message(request, user_profile, topic, body) return json_success() ```
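A quick way to exercise such a fix from Python rather than curl, since `requests` emits multipart/form-data when form fields are passed via `files`; the URL, API key, and payload below are placeholders:

```python
import json

import requests

payload = {"signature_request": {"title": "NDA with Acme Co."}}
# (None, value) tuples make requests send plain multipart form fields,
# mimicking HelloSign's callback format.
resp = requests.post(
    "http://localhost:9991/api/v1/external/hellosign?api_key=abcd1234",
    files={"json": (None, json.dumps(payload))},
)
print(resp.status_code, resp.text)
```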
2020-02-08T07:15:55
zulip/zulip
13,866
zulip__zulip-13866
[ "13845" ]
cb2c96f7368767e86341ec24949d8c64e11de0e8
diff --git a/tools/lib/provision_inner.py b/tools/lib/provision_inner.py --- a/tools/lib/provision_inner.py +++ b/tools/lib/provision_inner.py @@ -172,12 +172,7 @@ def main(options: argparse.Namespace) -> int: else: print("RabbitMQ is already configured.") - migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev") - dev_template_db_status = template_database_status( - migration_status=migration_status_path, - settings="zproject.settings", - database_name="zulip", - ) + dev_template_db_status = template_database_status('dev') if options.is_force or dev_template_db_status == 'needs_rebuild': run(["tools/setup/postgres-init-dev-db"]) run(["tools/do-destroy-rebuild-database"]) @@ -186,7 +181,7 @@ def main(options: argparse.Namespace) -> int: elif dev_template_db_status == 'current': print("No need to regenerate the dev DB.") - test_template_db_status = template_database_status() + test_template_db_status = template_database_status('test') if options.is_force or test_template_db_status == 'needs_rebuild': run(["tools/setup/postgres-init-test-db"]) run(["tools/do-destroy-rebuild-test-database"])
diff --git a/zerver/lib/test_fixtures.py b/zerver/lib/test_fixtures.py --- a/zerver/lib/test_fixtures.py +++ b/zerver/lib/test_fixtures.py @@ -5,7 +5,7 @@ import hashlib import subprocess import sys -from typing import Any, List, Optional, Set +from typing import Any, List, Set from importlib import import_module from io import StringIO import glob @@ -24,9 +24,23 @@ from scripts.lib.zulip_tools import get_dev_uuid_var_path, run, \ file_or_package_hash_updated, TEMPLATE_DATABASE_DIR +class DatabaseType: + def __init__(self, database_name: str, settings: str, migration_status: str): + self.database_name = database_name + self.settings = settings + self.migration_status = migration_status + UUID_VAR_DIR = get_dev_uuid_var_path() FILENAME_SPLITTER = re.compile(r'[\W\-_]') +DEV_DATABASE_TYPE = DatabaseType(database_name='zulip', + settings='zproject.settings', + migration_status=os.path.join(UUID_VAR_DIR, "migration_status_dev")) + +TEST_DATABASE_TYPE = DatabaseType(database_name='zulip_test_template', + settings='zproject.test_settings', + migration_status=os.path.join(UUID_VAR_DIR, 'migration_status_test')) + def run_db_migrations(platform: str) -> None: if platform == 'dev': migration_status_file = 'migration_status_dev' @@ -70,7 +84,7 @@ def update_test_databases_if_required(use_force: bool=False, If use_force is specified, it will always do a full rebuild. """ generate_fixtures_command = ['tools/setup/generate-fixtures'] - test_template_db_status = template_database_status() + test_template_db_status = template_database_status('test') if use_force or test_template_db_status == 'needs_rebuild': generate_fixtures_command.append('--force') elif test_template_db_status == 'run_migrations': @@ -186,43 +200,33 @@ def check_setting_hash(setting_name: str, status_dir: str) -> bool: return _check_hash(source_hash_file, target_content) -def template_database_status( - database_name: str='zulip_test_template', - migration_status: Optional[str]=None, - settings: str='zproject.test_settings', - check_files: Optional[List[str]]=None, - check_settings: Optional[List[str]]=None) -> str: +def template_database_status(database_type: str) -> str: # This function returns a status string specifying the type of # state the template db is in and thus the kind of action required. - if check_files is None: - check_files = [ - 'zilencer/management/commands/populate_db.py', - 'zerver/lib/bulk_create.py', - 'zerver/lib/generate_test_data.py', - 'zerver/lib/server_initialization.py', - 'tools/setup/postgres-init-test-db', - 'tools/setup/postgres-init-dev-db', - 'zerver/migrations/0258_enable_online_push_notifications_default.py', - ] - if check_settings is None: - check_settings = [ - 'REALM_INTERNAL_BOTS', - ] + if database_type == 'dev': + database = DEV_DATABASE_TYPE + elif database_type == 'test': + database = TEST_DATABASE_TYPE + + check_files = [ + 'zilencer/management/commands/populate_db.py', + 'zerver/lib/bulk_create.py', + 'zerver/lib/generate_test_data.py', + 'zerver/lib/server_initialization.py', + 'tools/setup/postgres-init-test-db', + 'tools/setup/postgres-init-dev-db', + 'zerver/migrations/0258_enable_online_push_notifications_default.py', + ] + check_settings = [ + 'REALM_INTERNAL_BOTS', + ] # Construct a directory to store hashes named after the target database. 
- status_dir = os.path.join(UUID_VAR_DIR, database_name + '_db_status') + status_dir = os.path.join(UUID_VAR_DIR, database.database_name + '_db_status') if not os.path.exists(status_dir): os.mkdir(status_dir) - # Arguably we should move this inside status_dir, but it'd require - # a bit of work since generate_fixtures expects to also know the - # path, and make the directory. We may also want to refactor this - # logic to be inside a couple class objects for the two databases, - # rather than a random-feeling set of option flags. - if migration_status is None: - migration_status = os.path.join(UUID_VAR_DIR, 'migration_status_test') - - if database_exists(database_name): + if database_exists(database.database_name): # To ensure Python evaluates all the hash tests (and thus creates the # hash files about the current state), we evaluate them in a # list and then process the result @@ -239,12 +243,12 @@ def template_database_status( # migrations without spending a few 100ms parsing all the # Python migration code. paths = glob.glob('*/migrations/*.py') - check_migrations = file_or_package_hash_updated(paths, "migrations_hash_" + database_name, + check_migrations = file_or_package_hash_updated(paths, "migrations_hash_" + database.database_name, is_force=False) if not check_migrations: return 'current' - migration_op = what_to_do_with_migrations(migration_status, settings=settings) + migration_op = what_to_do_with_migrations(database.migration_status, settings=database.settings) if migration_op == 'scrap': return 'needs_rebuild'
provision: Clean up interface for template_database_status
Following 84edb5c5162e9299ab5fb6a0a59c402484caa98c, it's clear that we should have a cleaner interface for how the arguments to `template_database_status` are passed, so that it correctly checks whether the target database (whether `zulip_test_template`, used by `test-backend`, or the `zulip` database used by the development environment) needs to be rebuilt.

I think we could do significantly better by defining a class or TypedDict holding the settings for the `zulip_test_template` and `zulip` databases and accessing those. (Or maybe just having a single argument for which database we're checking?) Fundamentally, there are only two valid ways to call `template_database_status`, so it shouldn't take five arguments with defaults; it should take one argument that specifies which database we want to access.
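The merged patch above settles on essentially this shape: a tiny settings bundle per database plus a single selector argument. A simplified, hedged sketch (paths and names abbreviated):

```python
import os
from typing import Dict

class DatabaseSpec:
    # One bundle of settings per database, instead of several loose
    # keyword arguments with defaults.
    def __init__(self, database_name: str, settings: str, migration_status: str) -> None:
        self.database_name = database_name
        self.settings = settings
        self.migration_status = migration_status

DATABASES: Dict[str, DatabaseSpec] = {
    "dev": DatabaseSpec("zulip", "zproject.settings",
                        os.path.join("var", "migration_status_dev")),
    "test": DatabaseSpec("zulip_test_template", "zproject.test_settings",
                         os.path.join("var", "migration_status_test")),
}

def template_database_status(which: str) -> str:
    spec = DATABASES[which]  # "dev" or "test" are the only valid choices
    # The real function also hashes populate_db and friends and inspects
    # pending migrations; this sketch only shows the narrowed interface.
    return "current" if os.path.exists(spec.migration_status) else "needs_rebuild"
```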
Hello @zulip/server-development, @zulip/server-refactoring, @zulip/server-tooling members, this issue was labeled with the "area: provision", "area: refactoring", "area: tooling" labels, so you may want to check it out!

@zulipbot claim
2020-02-10T14:01:23
zulip/zulip
13,943
zulip__zulip-13943
[ "13496" ]
0d4bf8613010019bfbc53f2c9c68b41c96cfb27c
diff --git a/zerver/lib/export.py b/zerver/lib/export.py --- a/zerver/lib/export.py +++ b/zerver/lib/export.py @@ -832,18 +832,28 @@ def fetch_user_profile_cross_realm(response: TableData, config: Config, context: realm = context['realm'] response['zerver_userprofile_crossrealm'] = [] + bot_name_to_default_email = { + "NOTIFICATION_BOT": "[email protected]", + "EMAIL_GATEWAY_BOT": "[email protected]", + "WELCOME_BOT": "[email protected]", + } + if realm.string_id == settings.SYSTEM_BOT_REALM: return - for bot_user in [ - get_system_bot(settings.NOTIFICATION_BOT), - get_system_bot(settings.EMAIL_GATEWAY_BOT), - get_system_bot(settings.WELCOME_BOT), - ]: - recipient_id = Recipient.objects.get(type_id=bot_user.id, type=Recipient.PERSONAL).id + for bot in settings.INTERNAL_BOTS: + bot_name = bot["var_name"] + if bot_name not in bot_name_to_default_email: + continue + + bot_email = bot["email_template"] % (settings.INTERNAL_BOT_DOMAIN,) + bot_default_email = bot_name_to_default_email[bot_name] + bot_user_id = get_system_bot(bot_email).id + + recipient_id = Recipient.objects.get(type_id=bot_user_id, type=Recipient.PERSONAL).id response['zerver_userprofile_crossrealm'].append(dict( - email=bot_user.email, - id=bot_user.id, + email=bot_default_email, + id=bot_user_id, recipient_id=recipient_id, )) diff --git a/zerver/lib/import_realm.py b/zerver/lib/import_realm.py --- a/zerver/lib/import_realm.py +++ b/zerver/lib/import_realm.py @@ -825,10 +825,6 @@ def do_import_realm(import_dir: Path, subdomain: str, processes: int=1) -> Realm # Remap the user IDs for notification_bot and friends to their # appropriate IDs on this server for item in data['zerver_userprofile_crossrealm']: - if item['email'].startswith("emailgateway@"): - # The email gateway bot's email is customized to a - # different domain on some servers. - item['email'] = settings.EMAIL_GATEWAY_BOT logging.info("Adding to ID map: %s %s" % (item['id'], get_system_bot(item['email']).id)) new_user_id = get_system_bot(item['email']).id update_id_map(table='user_profile', old_id=item['id'], new_id=new_user_id)
Fix data import when `EMAIL_GATEWAY_BOT` setting is in use
If a user has changed the `EMAIL_GATEWAY_BOT` setting (and the bot's email address), importing exported data into Zulip crashes with an exception (https://chat.zulip.org/#narrow/stream/31-production-help/topic/Realm.20import/near/800549).

I think this could be fixed by just adding a bit of code in `fetch_user_profile_cross_realm` to canonicalize the `EMAIL_GATEWAY_BOT` email address to Zulip's default value (so that all exports use the standard email address). We should reproduce the issue and test that fix in a development environment.
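The patch above does this by mapping each cross-realm bot's settings variable to its stock address; here is a hedged sketch of that idea, with placeholder addresses rather than the real defaults and a made-up helper name:

```python
from typing import Dict

# Placeholder defaults; a stock server has one well-known address per
# cross-realm bot, keyed by its settings variable name.
CANONICAL_BOT_EMAILS: Dict[str, str] = {
    "NOTIFICATION_BOT": "notification-bot@example.com",
    "EMAIL_GATEWAY_BOT": "emailgateway@example.com",
    "WELCOME_BOT": "welcome-bot@example.com",
}

def canonical_cross_realm_email(settings_var_name: str, configured_email: str) -> str:
    # Export the standard address even when the admin has customized,
    # say, EMAIL_GATEWAY_BOT, so the importing server can always resolve
    # the bot by its default email.
    return CANONICAL_BOT_EMAILS.get(settings_var_name, configured_email)
```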
Hello @zulip/server-misc members, this issue was labeled with the "area: export/import" label, so you may want to check it out!

@zulipbot claim

Hello @akornor, you have been unassigned from this issue because you have not updated this issue or any referenced pull requests for over 14 days. You can reclaim this issue or claim any other issue by commenting `@zulipbot claim` on that issue. Thanks for your contributions, and hope to see you again soon!

@zulipbot claim
2020-02-18T11:31:54
zulip/zulip
14,082
zulip__zulip-14082
[ "14080" ]
4fba227898d53e00cc078eaff8d178febd7eda31
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -1690,7 +1690,8 @@ def handleMatch(self, m: Match[str]) -> Optional[Element]: # Also do the same for StreamTopicPattern. stream_url = encode_stream(stream['id'], name) el.set('href', '/#narrow/stream/{stream_url}'.format(stream_url=stream_url)) - el.text = '#{stream_name}'.format(stream_name=name) + text = '#{stream_name}'.format(stream_name=name) + el.text = markdown.util.AtomicString(text) return el return None @@ -1718,7 +1719,8 @@ def handleMatch(self, m: Match[str]) -> Optional[Element]: link = '/#narrow/stream/{stream_url}/topic/{topic_url}'.format(stream_url=stream_url, topic_url=topic_url) el.set('href', link) - el.text = '#{stream_name} > {topic_name}'.format(stream_name=stream_name, topic_name=topic_name) + text = '#{stream_name} > {topic_name}'.format(stream_name=stream_name, topic_name=topic_name) + el.text = markdown.util.AtomicString(text) return el return None
diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -1680,6 +1680,29 @@ def test_topic_single(self) -> None: d=denmark )) + def test_topic_atomic_string(self) -> None: + realm = get_realm('zulip') + # Create a linkifier. + sender_user_profile = self.example_user('othello') + url_format_string = r"https://trac.zulip.net/ticket/%(id)s" + realm_filter = RealmFilter(realm=realm, + pattern=r"#(?P<id>[0-9]{2,8})", + url_format_string=url_format_string) + realm_filter.save() + self.assertEqual( + realm_filter.__str__(), + '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' + ' https://trac.zulip.net/ticket/%(id)s>') + # Create a topic link that potentially interferes with the pattern. + denmark = get_stream('Denmark', realm) + msg = Message(sender=sender_user_profile, sending_client=get_client("test")) + content = "#**Denmark>#1234**" + self.assertEqual( + render_markdown(msg, content), + '<p><a class="stream-topic" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark/topic/.231234">#{d.name} &gt; #1234</a></p>'.format( + d=denmark + )) + def test_topic_multiple(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) scotland = get_stream('Scotland', get_realm('zulip')) @@ -1725,6 +1748,31 @@ def test_stream_unicode(self) -> None: href=href, )) + def test_stream_atomic_string(self) -> None: + realm = get_realm('zulip') + # Create a linkifier. + sender_user_profile = self.example_user('othello') + url_format_string = r"https://trac.zulip.net/ticket/%(id)s" + realm_filter = RealmFilter(realm=realm, + pattern=r"#(?P<id>[0-9]{2,8})", + url_format_string=url_format_string) + realm_filter.save() + self.assertEqual( + realm_filter.__str__(), + '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' + ' https://trac.zulip.net/ticket/%(id)s>') + # Create a stream that potentially interferes with the pattern. + stream = Stream.objects.create(name=u'Stream #1234', realm=realm) + msg = Message(sender=sender_user_profile, sending_client=get_client("test")) + content = u"#**Stream #1234**" + href = '/#narrow/stream/{stream_id}-Stream-.231234'.format(stream_id=stream.id) + self.assertEqual( + render_markdown(msg, content), + u'<p><a class="stream" data-stream-id="{s.id}" href="{href}">#{s.name}</a></p>'.format( + s=stream, + href=href, + )) + def test_stream_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test"))
Topic mentions shouldn't be processed by linkifier Steps to reproduce: 1. Add a linkifier pattern, let's use the example: ``#(?P<id>[0-9]+)`` -> ``https://github.com/zulip/zulip/issues/%(id)s`` 2. Create a topic that should get linkified, let's say ``#4871``. 3. Now mention the topic (``#**Denmark>#4871**``). What happens is that the ``#4871`` part of the rendered text links to ``https://github.com/zulip/zulip/issues/4871``, but the correct way is for the whole rendered text to be a simple link to the mentioned topic. ![image](https://user-images.githubusercontent.com/45007152/75631227-fc339280-5be8-11ea-9813-8fe0af63717c.png) @aero31aero Do you have time to look into this?
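The fix in the patch above relies on python-markdown's `AtomicString`, which marks a string as exempt from further inline processing (including realm linkifiers). A trimmed-down sketch of the idea, with a made-up helper name:

```python
import xml.etree.ElementTree as etree

import markdown

def build_topic_link(href: str, stream_name: str, topic_name: str) -> etree.Element:
    el = etree.Element("a")
    el.set("class", "stream-topic")
    el.set("href", href)
    text = "#{} > {}".format(stream_name, topic_name)
    # AtomicString tells python-markdown not to run the remaining inline
    # patterns (such as linkifiers) over this link text again.
    el.text = markdown.util.AtomicString(text)
    return el
```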
2020-03-01T19:25:18
zulip/zulip
14,091
zulip__zulip-14091
[ "13913" ]
cb85763c7870bd6c83e84e4e7bca0900810457e9
diff --git a/zerver/lib/rate_limiter.py b/zerver/lib/rate_limiter.py --- a/zerver/lib/rate_limiter.py +++ b/zerver/lib/rate_limiter.py @@ -32,12 +32,14 @@ class RateLimitedObject(ABC): def __init__(self, backend: Optional['Type[RateLimiterBackend]']=None) -> None: if backend is not None: self.backend = backend # type: Type[RateLimiterBackend] + elif settings.RUNNING_INSIDE_TORNADO: + self.backend = TornadoInMemoryRateLimiterBackend else: self.backend = RedisRateLimiterBackend def rate_limit(self) -> Tuple[bool, float]: # Returns (ratelimited, secs_to_freedom) - return self.backend.rate_limit_entity(self.key(), self.rules(), + return self.backend.rate_limit_entity(self.key(), self.get_rules(), self.max_api_calls(), self.max_api_window()) @@ -74,11 +76,11 @@ def clear_history(self) -> None: def max_api_calls(self) -> int: "Returns the API rate limit for the highest limit" - return self.rules()[-1][1] + return self.get_rules()[-1][1] def max_api_window(self) -> int: "Returns the API time window for the highest limit" - return self.rules()[-1][0] + return self.get_rules()[-1][0] def api_calls_left(self) -> Tuple[int, float]: """Returns how many API calls in this range this client has, as well as when @@ -87,6 +89,16 @@ def api_calls_left(self) -> Tuple[int, float]: max_calls = self.max_api_calls() return self.backend.get_api_calls_left(self.key(), max_window, max_calls) + def get_rules(self) -> List[Tuple[int, int]]: + """ + This is a simple wrapper meant to protect against having to deal with + an empty list of rules, as it would require fiddling with that special case + all around this system. "9999 max request per seconds" should be a good proxy + for "no rules". + """ + rules_list = self.rules() + return rules_list or [(1, 9999), ] + @abstractmethod def key(self) -> str: pass @@ -139,6 +151,7 @@ class RateLimiterBackend(ABC): @abstractmethod def block_access(cls, entity_key: str, seconds: int) -> None: "Manually blocks an entity for the desired number of seconds" + pass @classmethod @abstractmethod @@ -163,6 +176,120 @@ def rate_limit_entity(cls, entity_key: str, rules: List[Tuple[int, int]], # Returns (ratelimited, secs_to_freedom) pass +class TornadoInMemoryRateLimiterBackend(RateLimiterBackend): + # reset_times[rule][key] is the time at which the event + # request from the rate-limited key will be accepted. + reset_times = {} # type: Dict[Tuple[int, int], Dict[str, float]] + + # last_gc_time is the last time when the garbage was + # collected from reset_times for rule (time_window, max_count). + last_gc_time = {} # type: Dict[Tuple[int, int], float] + + # timestamps_blocked_until[key] contains the timestamp + # up to which the key has been blocked manually. + timestamps_blocked_until = {} # type: Dict[str, float] + + @classmethod + def _garbage_collect_for_rule(cls, now: float, time_window: int, max_count: int) -> None: + keys_to_delete = [] + reset_times_for_rule = cls.reset_times.get((time_window, max_count), None) + if reset_times_for_rule is None: + return + + keys_to_delete = [entity_key for entity_key in reset_times_for_rule + if reset_times_for_rule[entity_key] < now] + + for entity_key in keys_to_delete: + del reset_times_for_rule[entity_key] + + if not reset_times_for_rule: + del cls.reset_times[(time_window, max_count)] + + @classmethod + def need_to_limit(cls, entity_key: str, time_window: int, + max_count: int) -> Tuple[bool, float]: + ''' + Returns a tuple of `(rate_limited, time_till_free)`. 
+ For simplicity, we have loosened the semantics here from + - each key may make atmost `count * (t / window)` request within any t + time interval. + to + - each key may make atmost `count * [(t / window) + 1]` request within + any t time interval. + Thus, we only need to store reset_times for each key which will be less + memory-intensive. This also has the advantage that you can only ever + lock yourself out completely for `window / count` seconds instead of + `window` seconds. + ''' + now = time.time() + + # Remove all timestamps from `reset_times` that are too old. + if cls.last_gc_time.get((time_window, max_count), 0) <= now - time_window / max_count: + cls.last_gc_time[(time_window, max_count)] = now + cls._garbage_collect_for_rule(now, time_window, max_count) + + reset_times_for_rule = cls.reset_times.setdefault((time_window, max_count), {}) + new_reset = max(reset_times_for_rule.get(entity_key, now), now) \ + + time_window / max_count + + if new_reset > now + time_window: + # Compute for how long the bucket will remain filled. + time_till_free = new_reset - time_window - now + return True, time_till_free + + reset_times_for_rule[entity_key] = new_reset + return False, 0.0 + + @classmethod + def get_api_calls_left(cls, entity_key: str, range_seconds: int, + max_calls: int) -> Tuple[int, float]: + now = time.time() + if (range_seconds, max_calls) in cls.reset_times and \ + entity_key in cls.reset_times[(range_seconds, max_calls)]: + reset_time = cls.reset_times[(range_seconds, max_calls)][entity_key] + else: + return max_calls, 0 + + calls_remaining = (now + range_seconds - reset_time) * max_calls // range_seconds + return int(calls_remaining), reset_time - now + + @classmethod + def block_access(cls, entity_key: str, seconds: int) -> None: + now = time.time() + cls.timestamps_blocked_until[entity_key] = now + seconds + + @classmethod + def unblock_access(cls, entity_key: str) -> None: + del cls.timestamps_blocked_until[entity_key] + + @classmethod + def clear_history(cls, entity_key: str) -> None: + for rule, reset_times_for_rule in cls.reset_times.items(): + reset_times_for_rule.pop(entity_key, None) + cls.timestamps_blocked_until.pop(entity_key, None) + + @classmethod + def rate_limit_entity(cls, entity_key: str, rules: List[Tuple[int, int]], + max_api_calls: int, max_api_window: int) -> Tuple[bool, float]: + now = time.time() + if entity_key in cls.timestamps_blocked_until: + # Check whether the key is manually blocked. + if now < cls.timestamps_blocked_until[entity_key]: + blocking_ttl = cls.timestamps_blocked_until[entity_key] - now + return True, blocking_ttl + else: + del cls.timestamps_blocked_until[entity_key] + + assert rules + for time_window, max_count in rules: + ratelimited, time_till_free = cls.need_to_limit(entity_key, time_window, max_count) + + if ratelimited: + statsd.incr("ratelimiter.limited.%s" % (entity_key,)) + break + + return ratelimited, time_till_free + class RedisRateLimiterBackend(RateLimiterBackend): @classmethod def get_keys(cls, entity_key: str) -> List[str]: @@ -220,11 +347,9 @@ def get_api_calls_left(cls, entity_key: str, range_seconds: int, @classmethod def is_ratelimited(cls, entity_key: str, rules: List[Tuple[int, int]]) -> Tuple[bool, float]: "Returns a tuple of (rate_limited, time_till_free)" + assert rules list_key, set_key, blocking_key = cls.get_keys(entity_key) - if len(rules) == 0: - return False, 0.0 - # Go through the rules from shortest to longest, # seeing if this user has violated any of them. 
First # get the timestamps for each nth items @@ -244,7 +369,7 @@ def is_ratelimited(cls, entity_key: str, rules: List[Tuple[int, int]]) -> Tuple[ if key_blocked is not None: # We are manually blocked. Report for how much longer we will be - if blocking_ttl_b is None: + if blocking_ttl_b is None: # nocoverage # defensive code, this should never happen blocking_ttl = 0.5 else: blocking_ttl = int(blocking_ttl_b) @@ -265,16 +390,11 @@ def is_ratelimited(cls, entity_key: str, rules: List[Tuple[int, int]]) -> Tuple[ return False, 0.0 @classmethod - def incr_ratelimit(cls, entity_key: str, rules: List[Tuple[int, int]], - max_api_calls: int, max_api_window: int) -> None: + def incr_ratelimit(cls, entity_key: str, max_api_calls: int, max_api_window: int) -> None: """Increases the rate-limit for the specified entity""" list_key, set_key, _ = cls.get_keys(entity_key) now = time.time() - # If we have no rules, we don't store anything - if len(rules) == 0: - return - # Start redis transaction with client.pipeline() as pipe: count = 0 @@ -316,7 +436,7 @@ def incr_ratelimit(cls, entity_key: str, rules: List[Tuple[int, int]], # If no exception was raised in the execution, there were no transaction conflicts break - except redis.WatchError: + except redis.WatchError: # nocoverage # Ideally we'd have a test for this. if count > 10: raise RateLimiterLockingException() count += 1 @@ -333,7 +453,7 @@ def rate_limit_entity(cls, entity_key: str, rules: List[Tuple[int, int]], else: try: - cls.incr_ratelimit(entity_key, rules, max_api_calls, max_api_window) + cls.incr_ratelimit(entity_key, max_api_calls, max_api_window) except RateLimiterLockingException: logger.warning("Deadlock trying to incr_ratelimit for %s" % (entity_key,)) # rate-limit users who are hitting the API so hard we can't update our stats.
diff --git a/zerver/tests/test_rate_limiter.py b/zerver/tests/test_rate_limiter.py --- a/zerver/tests/test_rate_limiter.py +++ b/zerver/tests/test_rate_limiter.py @@ -5,6 +5,7 @@ RateLimitedUser, RateLimiterBackend, RedisRateLimiterBackend, + TornadoInMemoryRateLimiterBackend, ) from zerver.lib.test_classes import ZulipTestCase @@ -68,7 +69,7 @@ def verify_api_calls_left(self, obj: RateLimitedTestObject) -> None: self.assertEqual(expected_time_till_reset, time_till_reset) def expected_api_calls_left(self, obj: RateLimitedTestObject, now: float) -> Tuple[int, float]: - longest_rule = obj.rules()[-1] + longest_rule = obj.get_rules()[-1] max_window, max_calls = longest_rule history = self.requests_record.get(obj.key()) if history is None: @@ -82,7 +83,7 @@ def api_calls_left_from_history(self, history: List[float], max_window: int, """ This depends on the algorithm used in the backend, and should be defined by the test class. """ - raise NotImplementedError # nocoverage + raise NotImplementedError() def test_hit_ratelimits(self) -> None: obj = self.create_object('test', [(2, 3), ]) @@ -165,13 +166,45 @@ def test_block_access(self) -> None: obj.block_access(1) self.make_request(obj, expect_ratelimited=True, verify_api_calls_left=False) -class RateLimitedUserTest(ZulipTestCase): +class TornadoInMemoryRateLimiterBackendTest(RateLimiterBackendBase): + __unittest_skip__ = False + backend = TornadoInMemoryRateLimiterBackend + + def api_calls_left_from_history(self, history: List[float], max_window: int, + max_calls: int, now: float) -> Tuple[int, float]: + reset_time = 0.0 + for timestamp in history: + reset_time = max(reset_time, timestamp) + (max_window / max_calls) + + calls_left = (now + max_window - reset_time) * max_calls // max_window + calls_left = int(calls_left) + + return calls_left, reset_time - now + + def test_used_in_tornado(self) -> None: + user_profile = self.example_user("hamlet") + with self.settings(RUNNING_INSIDE_TORNADO=True): + obj = RateLimitedUser(user_profile) + self.assertEqual(obj.backend, TornadoInMemoryRateLimiterBackend) + + def test_block_access(self) -> None: + obj = self.create_object('test', [(2, 5), ]) + start_time = time.time() + + obj.block_access(1) + with mock.patch('time.time', return_value=(start_time)): + self.make_request(obj, expect_ratelimited=True, verify_api_calls_left=False) + + with mock.patch('time.time', return_value=(start_time + 1.01)): + self.make_request(obj, expect_ratelimited=False, verify_api_calls_left=False) + +class RateLimitedObjectsTest(ZulipTestCase): def test_user_rate_limits(self) -> None: user_profile = self.example_user("hamlet") user_profile.rate_limits = "1:3,2:4" obj = RateLimitedUser(user_profile) - self.assertEqual(obj.rules(), [(1, 3), (2, 4)]) + self.assertEqual(obj.get_rules(), [(1, 3), (2, 4)]) def test_add_remove_rule(self) -> None: user_profile = self.example_user("hamlet") @@ -180,9 +213,13 @@ def test_add_remove_rule(self) -> None: add_ratelimit_rule(10, 100, domain='some_new_domain') obj = RateLimitedUser(user_profile) - self.assertEqual(obj.rules(), [(1, 2), ]) + self.assertEqual(obj.get_rules(), [(1, 2), ]) obj.domain = 'some_new_domain' - self.assertEqual(obj.rules(), [(4, 5), (10, 100)]) + self.assertEqual(obj.get_rules(), [(4, 5), (10, 100)]) remove_ratelimit_rule(10, 100, domain='some_new_domain') - self.assertEqual(obj.rules(), [(4, 5), ]) + self.assertEqual(obj.get_rules(), [(4, 5), ]) + + def test_empty_rules_edge_case(self) -> None: + obj = RateLimitedTestObject("test", rules=[], 
backend=RedisRateLimiterBackend) + self.assertEqual(obj.get_rules(), [(1, 9999), ])
Optimize rate_limiter performance for get_events queries
See https://chat.zulip.org/#narrow/stream/3-backend/topic/profiling.20get_events/near/816860 for profiling details, but basically, currently a get_events request spends 1.4ms/request talking to redis for our rate limiter, which is somewhere between 15% and 50% of the total request runtime (my measurement technique is susceptible to issues like the first request on a code path being extra expensive). Since get_events is our most scalability-critical endpoint, this is a big deal.

We should do some rethinking of the redis internals for our rate limiter. I have a few ideas:

* Writing an alternative rate-limiter implementation for `get_events` specifically that's entirely in-process and would be basically instant. Since the Tornado system has a relatively strong constraint that a given user always connect to the same server, this might be fairly cheap to implement and would bring that 1.4ms to probably 50us or less. (And gate it on `RUNNING_INSIDE_TORNADO`.)
* Look at rewriting our redis transactions to be more efficient for the highest-traffic cases (e.g. user is not close to limit, or user is way over limit). E.g. maybe `incr_ratelimit` should automatically return the `api_calls_left` result rather than requiring 2 transactions.
* Looking at https://github.com/popravich/python-redis-benchmark, there may be some alternative async IO redis clients we could consider migrating to, and possibly some that are just faster. Given how little code we have interacting with redis directly, this might be an easy port to do; I'm not sure whether or not it would help. (And unlike the in-process hack approach, this would have side benefits to non-Tornado endpoints.)
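The first idea is what the patch above ends up implementing. A stripped-down sketch of that scheme, which tracks a single float per key rather than a request history in Redis (class and method names simplified):

```python
import time
from typing import Dict

class InProcessRateLimiter:
    """Allow roughly max_calls per window per key, storing only each
    key's next-allowed "reset time" instead of its full request history."""

    def __init__(self, max_calls: int, window_seconds: float) -> None:
        self.max_calls = max_calls
        self.window = window_seconds
        self.reset_times: Dict[str, float] = {}

    def is_ratelimited(self, key: str) -> bool:
        now = time.time()
        step = self.window / self.max_calls
        new_reset = max(self.reset_times.get(key, now), now) + step
        if new_reset > now + self.window:
            # Bucket is full; the caller should reject this request.
            return True
        self.reset_times[key] = new_reset
        return False
```

The trade-off, as the patch's docstring notes, is slightly looser semantics than the Redis backend in exchange for avoiding a network round trip per `get_events` request.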
Hello @zulip/server-production members, this issue was labeled with the "area: production" label, so you may want to check it out!

@zulipbot claim
2020-03-02T23:59:47
zulip/zulip
14,270
zulip__zulip-14270
[ "14264" ]
f9db77c4002e77b75f15a55115748a5f9c2c7963
diff --git a/zerver/views/development/integrations.py b/zerver/views/development/integrations.py --- a/zerver/views/development/integrations.py +++ b/zerver/views/development/integrations.py @@ -70,13 +70,13 @@ def get_fixtures(request: HttpResponse, headers_raw = get_fixture_http_headers(integration_name, "".join(fixture.split(".")[:-1])) - headers = {} - for header in headers_raw: + + def fix_name(header: str) -> str: if header.startswith("HTTP_"): # HTTP_ is a prefix intended for Django. - headers[header.lstrip("HTTP_")] = headers_raw[header] - else: - headers[header] = headers_raw[header] + return header[len("HTTP_"):] + return header + headers = {fix_name(k): v for k, v in headers_raw.items()} fixtures[fixture] = {"body": body, "headers": headers} return json_success({"fixtures": fixtures}) diff --git a/zerver/views/documentation.py b/zerver/views/documentation.py --- a/zerver/views/documentation.py +++ b/zerver/views/documentation.py @@ -107,7 +107,7 @@ def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: with open(article_path) as article_file: first_line = article_file.readlines()[0] # Strip the header and then use the first line to get the article title - article_title = first_line.strip().lstrip("# ") + article_title = first_line.lstrip("#").strip() if context["not_index_page"]: context["OPEN_GRAPH_TITLE"] = "%s (%s)" % (article_title, title_base) else:
Zulip improperly uses `lstrip` to remove prefix in development integrations Hello, While [scanning a bunch of code](https://app.r2c.dev), I came across https://github.com/zulip/zulip/blob/master/zerver/views/development/integrations.py#L76 which uses `lstrip` to remove a prefix from HTTP headers. `lstrip` is misleading -- `lstrip` removes a **set**, not a **sequence**, of characters from the left side of the string. (https://docs.python.org/3/library/stdtypes.html#str.lstrip) You can see the behavior of `lstrip` below: ```python >>> tests = ( ... "HTTP_HEADER", ... "HTP_PADDING", ... "TPS_REPORT", ... "SAFE_STRING", ... "__PRIVATE_VAR", ... "PTHPHPTPPHPTPHPT___REMAIN" ... ) >>> for test in tests: ... print(test.lstrip("HTTP_")) ... EADER #Yikes ADDING S_REPORT SAFE_STRING RIVATE_VAR REMAIN >>> ``` Surprisingly, a quick Google search shows that there is no standard method of removing a prefix in Python. [This post](https://stackoverflow.com/a/16892491) on Stack Overflow offers the following one-liner as a fix: ```python text[text.startswith(prefix) and len(prefix):] ``` Anyway, I hope this helps! If you prefer, I can open a pull request to patch the issue, but I figured someone else would have the development and testing environments already set up and could fix it faster than I could. :) Thanks!
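For completeness, a tiny hedged helper in the spirit of that one-liner (Python 3.9's `str.removeprefix` later standardized the same behavior):

```python
def remove_prefix(text: str, prefix: str) -> str:
    # Unlike text.lstrip(prefix), this strips the prefix *string*, not any
    # leading run of its characters.
    if text.startswith(prefix):
        return text[len(prefix):]
    return text

assert remove_prefix("HTTP_HEADER", "HTTP_") == "HEADER"
assert remove_prefix("HTP_PADDING", "HTTP_") == "HTP_PADDING"
assert remove_prefix("TPS_REPORT", "HTTP_") == "TPS_REPORT"
```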
2020-03-20T17:35:48
zulip/zulip
14,275
zulip__zulip-14275
[ "14025" ]
6df86dab3ef442b3e153a69f8133c7911b519d14
diff --git a/scripts/lib/setup_path.py b/scripts/lib/setup_path.py --- a/scripts/lib/setup_path.py +++ b/scripts/lib/setup_path.py @@ -12,5 +12,8 @@ def setup_path() -> None: activate_this = os.path.join(venv, "bin", "activate_this.py") activate_locals = dict(__file__=activate_this) exec(open(activate_this).read(), activate_locals) - if not os.path.exists(activate_locals["site_packages"]): + # Check that the python version running this function + # is same as python version that created the virtualenv. + python_version = "python{}.{}".format(*sys.version_info[:2]) + if not os.path.exists(os.path.join(venv, 'lib', python_version)): raise RuntimeError(venv + " was not set up for this Python version")
virtualenv 20.0.1 breaks zulip installer When virtualenv 20.0.1 is installed on a system, the zulip installer fails to complete due to an error in `scripts/lib/setup_path_on_import.py`: ``` Successfully installed Yarn 1.19.1! + mkdir -p /etc/zulip + cat + '[' -n '' ']' + dpkg-query --showformat '${Status}\n' -W rabbitmq-server + grep -vq ' not-installed$' + cat + '[' -n '' ']' + case ",$PUPPET_CLASSES," in + '[' -z '' ']' + cp -a /tmp/tmp.yliyQLjARE/zulip-server-2.1.2/zproject/prod_settings_template.py /etc/zulip/settings.py + '[' -n zulip ']' + sed -i 's/^EXTERNAL_HOST =.*/EXTERNAL_HOST = '\''zulip'\''/' /etc/zulip/settings.py + '[' -n [email protected] ']' + sed -i 's/^ZULIP_ADMINISTRATOR =.*/ZULIP_ADMINISTRATOR = '\''[email protected]'\''/' /etc/zulip/settings.py + ln -nsf /etc/zulip/settings.py /tmp/tmp.yliyQLjARE/zulip-server-2.1.2/zproject/prod_settings.py + /tmp/tmp.yliyQLjARE/zulip-server-2.1.2/scripts/setup/generate_secrets.py --production Traceback (most recent call last): File "/tmp/tmp.yliyQLjARE/zulip-server-2.1.2/scripts/setup/generate_secrets.py", line 11, in <module> import scripts.lib.setup_path_on_import File "/tmp/tmp.yliyQLjARE/zulip-server-2.1.2/scripts/lib/setup_path_on_import.py", line 14, in <module> if not os.path.exists(activate_locals["site_packages"]): KeyError: 'site_packages' Zulip installation failed (exit code 1)! The install process is designed to be idempotent, so you can retry after resolving whatever issue caused the failure (there should be a traceback above). A log of this installation is available in /var/log/zulip/install.log root@zulip-2001:/tmp/tmp.yliyQLjARE/zulip-server-2.1.2# ``` Something about how the virtualenv at `/srv/zulip-venv-cache/$hash` is being created is causing issues (note that I was able to recreate this with Zulip 2.0.7 as well). ## Steps to recreate I provisioned an Ubuntu 18.04 container with LXD and performed the following: 1. `pip3 install virtualenv==20.0.1`, then followed the [installation guide](https://zulip.readthedocs.io/en/stable/production/install.html#installer-details): 2. `cd $(mktemp -d); wget https://www.zulip.org/dist/releases/zulip-server-latest.tar.gz; tar -xf zulip-server-latest.tar.gz` 3. `zulip-server-*/scripts/setup/install --self-signed-cert [email protected] --hostname=zulip` When version 20.0.1 is substituted with 15.1.0 or 16.7.10 on an identical container, the install process finishes without any errors. If virtualenv 20.0.1 isn't supported, I would think a check in the installer to ensure the correct version (or a note on the installation page) would be useful, since I imagine it would be common for people to want the latest version of virtualenv installed on systems. I can provide more logs if needed. Thanks!
Certainly sounds like a bug. @andersk can you look into this?

Hello @zulip/server-production members, this issue was labeled with the "area: production installer" label, so you may want to check it out!

@zulipbot claim

@timabbott @andersk This is because in virtualenv>20.0.0, [activate_this.py](https://github.com/pypa/virtualenv/blob/master/src/virtualenv/activation/python/activate_this.py) was modified and no longer defines the `site_packages` variable, which causes the `KeyError`. I guess we have to use 'path' instead of `site_packages`, or some other method, to verify that the virtual environment was set up for this Python version.
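The patch above fixes this by checking for the interpreter-versioned `lib/` directory instead of relying on variables exported by `activate_this.py`; a standalone hedged sketch of that check:

```python
import os
import sys

def venv_built_for_this_python(venv_path: str) -> bool:
    # virtualenv >= 20 no longer exposes a `site_packages` variable from
    # activate_this.py, so look for the lib/pythonX.Y directory matching
    # the interpreter that is running this check.
    python_version = "python{}.{}".format(*sys.version_info[:2])
    return os.path.exists(os.path.join(venv_path, "lib", python_version))
```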
2020-03-20T22:50:56
zulip/zulip
14,385
zulip__zulip-14385
[ "14384" ]
12474a3deb5f898009fccba3d43299ca1660015b
diff --git a/zerver/lib/message.py b/zerver/lib/message.py --- a/zerver/lib/message.py +++ b/zerver/lib/message.py @@ -2,6 +2,7 @@ import ujson import zlib import ahocorasick +import copy from django.utils.translation import ugettext as _ from django.utils.timezone import now as timezone_now @@ -190,17 +191,46 @@ def wide_dict(message: Message) -> Dict[str, Any]: @staticmethod def post_process_dicts(objs: List[Dict[str, Any]], apply_markdown: bool, client_gravatar: bool) -> None: + ''' + NOTE: This function mutates the objects in + the `objs` list, rather than making + shallow copies. It might be safer to + make shallow copies here, but performance + is somewhat important here, as we are + often fetching several messages. + ''' MessageDict.bulk_hydrate_sender_info(objs) MessageDict.bulk_hydrate_recipient_info(objs) for obj in objs: - MessageDict.finalize_payload(obj, apply_markdown, client_gravatar) + MessageDict._finalize_payload(obj, apply_markdown, client_gravatar) @staticmethod def finalize_payload(obj: Dict[str, Any], apply_markdown: bool, client_gravatar: bool, - keep_rendered_content: bool=False) -> None: + keep_rendered_content: bool=False) -> Dict[str, Any]: + ''' + Make a shallow copy of the incoming dict to avoid + mutation-related bugs. This function is often + called when we're sending out message events to + multiple clients, who often want the final dictionary + to have different shapes here based on the parameters. + ''' + new_obj = copy.copy(obj) + + # Next call our worker, which mutates the record in place. + MessageDict._finalize_payload( + new_obj, + apply_markdown=apply_markdown, + client_gravatar=client_gravatar, + keep_rendered_content=keep_rendered_content + ) + return new_obj + + @staticmethod + def _finalize_payload(obj: Dict[str, Any], apply_markdown: bool, client_gravatar: bool, + keep_rendered_content: bool=False) -> None: MessageDict.set_sender_avatar(obj, client_gravatar) if apply_markdown: obj['content_type'] = 'text/html' diff --git a/zerver/lib/outgoing_webhook.py b/zerver/lib/outgoing_webhook.py --- a/zerver/lib/outgoing_webhook.py +++ b/zerver/lib/outgoing_webhook.py @@ -28,15 +28,24 @@ def __init__(self, token: str, user_profile: UserProfile, service_name: str) -> class GenericOutgoingWebhookService(OutgoingWebhookServiceInterface): def build_bot_request(self, event: Dict[str, Any]) -> Optional[Any]: - # Because we don't have a place for the recipient of an - # outgoing webhook to indicate whether it wants the raw - # Markdown or the rendered HTML, we leave both the content and - # rendered_content fields in the message payload. - MessageDict.finalize_payload(event['message'], False, False, - keep_rendered_content=True) + ''' + We send a simple version of the message to outgoing + webhooks, since most of them really only need + `content` and a few other fields. We may eventually + allow certain bots to get more information, but + that's not a high priority. We do send the gravatar + info to the clients (so they don't have to compute + it themselves). 
+ ''' + message_dict = MessageDict.finalize_payload( + event['message'], + apply_markdown=False, + client_gravatar=False, + keep_rendered_content=True + ) request_data = {"data": event['command'], - "message": event['message'], + "message": message_dict, "bot_email": self.user_profile.email, "token": self.token, "trigger": event['trigger']} @@ -53,8 +62,7 @@ def send_data_to_server(self, response = requests.request('POST', base_url, data=request_data, headers=headers) return response - def process_success(self, response_json: Dict[str, Any], - event: Dict[str, Any]) -> Optional[Dict[str, Any]]: + def process_success(self, response_json: Dict[str, Any]) -> Optional[Dict[str, Any]]: if "response_not_required" in response_json and response_json['response_not_required']: return None @@ -102,8 +110,7 @@ def send_data_to_server(self, response = requests.request('POST', base_url, data=request_data) return response - def process_success(self, response_json: Dict[str, Any], - event: Dict[str, Any]) -> Optional[Dict[str, Any]]: + def process_success(self, response_json: Dict[str, Any]) -> Optional[Dict[str, Any]]: if "text" in response_json: content = response_json['text'] success_data = dict(content=content) @@ -251,7 +258,7 @@ def process_success_response(event: Dict[str, Any], fail_with_message(event, "Invalid JSON in response") return - success_data = service_handler.process_success(response_json, event) + success_data = service_handler.process_success(response_json) if success_data is None: return diff --git a/zerver/tornado/event_queue.py b/zerver/tornado/event_queue.py --- a/zerver/tornado/event_queue.py +++ b/zerver/tornado/event_queue.py @@ -824,6 +824,15 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map presence_idle_user_ids = set(event_template.get('presence_idle_user_ids', [])) wide_dict = event_template['message_dict'] # type: Dict[str, Any] + # Temporary transitional code: Zulip servers that have message + # events in their event queues and upgrade to the new version + # that expects sender_delivery_email in these events will + # throw errors processing events. We can remove this block + # once we don't expect anyone to be directly upgrading from + # 2.0.x to the latest Zulip. + if 'sender_delivery_email' not in wide_dict: # nocoverage + wide_dict['sender_delivery_email'] = wide_dict['sender_email'] + sender_id = wide_dict['sender_id'] # type: int message_id = wide_dict['id'] # type: int message_type = wide_dict['type'] # type: str @@ -831,19 +840,11 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map @cachify def get_client_payload(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]: - dct = copy.deepcopy(wide_dict) - - # Temporary transitional code: Zulip servers that have message - # events in their event queues and upgrade to the new version - # that expects sender_delivery_email in these events will - # throw errors processing events. We can remove this block - # once we don't expect anyone to be directly upgrading from - # 2.0.x to the latest Zulip. - if 'sender_delivery_email' not in dct: # nocoverage - dct['sender_delivery_email'] = dct['sender_email'] - - MessageDict.finalize_payload(dct, apply_markdown, client_gravatar) - return dct + return MessageDict.finalize_payload( + wide_dict, + apply_markdown=apply_markdown, + client_gravatar=client_gravatar + ) # Extra user-specific data to include extra_user_data = {} # type: Dict[int, Any]
diff --git a/zerver/tests/test_messages.py b/zerver/tests/test_messages.py --- a/zerver/tests/test_messages.py +++ b/zerver/tests/test_messages.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -import copy - from django.db import IntegrityError from django.db.models import Q from django.conf import settings @@ -1302,12 +1300,8 @@ def get_send_message_payload( msg = reload_message(msg_id) wide_dict = MessageDict.wide_dict(msg) - # TODO: Have finalize_payload make this - # copy for us, rather than mutating - # in place. - narrow_dict = copy.copy(wide_dict) - MessageDict.finalize_payload( - narrow_dict, + narrow_dict = MessageDict.finalize_payload( + wide_dict, apply_markdown=apply_markdown, client_gravatar=client_gravatar, ) diff --git a/zerver/tests/test_narrow.py b/zerver/tests/test_narrow.py --- a/zerver/tests/test_narrow.py +++ b/zerver/tests/test_narrow.py @@ -2416,9 +2416,13 @@ def test_message_without_rendered_content(self) -> None: m = self.get_last_message() m.rendered_content = m.rendered_content_version = None m.content = 'test content' - d = MessageDict.wide_dict(m) - MessageDict.finalize_payload(d, apply_markdown=True, client_gravatar=False) - self.assertEqual(d['content'], '<p>test content</p>') + wide_dict = MessageDict.wide_dict(m) + final_dict = MessageDict.finalize_payload( + wide_dict, + apply_markdown=True, + client_gravatar=False, + ) + self.assertEqual(final_dict['content'], '<p>test content</p>') def common_check_get_messages_query(self, query_params: Dict[str, object], expected: str) -> None: user_profile = self.example_user('hamlet') diff --git a/zerver/tests/test_outgoing_webhook_interfaces.py b/zerver/tests/test_outgoing_webhook_interfaces.py --- a/zerver/tests/test_outgoing_webhook_interfaces.py +++ b/zerver/tests/test_outgoing_webhook_interfaces.py @@ -5,37 +5,34 @@ import json import requests +from zerver.lib.avatar import get_gravatar_url from zerver.lib.message import MessageDict from zerver.lib.outgoing_webhook import ( get_service_interface_class, process_success_response, ) from zerver.lib.test_classes import ZulipTestCase +from zerver.lib.timestamp import datetime_to_timestamp from zerver.lib.topic import TOPIC_NAME -from zerver.models import get_realm, get_user, SLACK_INTERFACE, Message +from zerver.models import ( + get_realm, + get_stream, + get_user, + Message, + SLACK_INTERFACE, +) + class TestGenericOutgoingWebhookService(ZulipTestCase): def setUp(self) -> None: super().setUp() - # TODO: Ideally, this test would use the full flow, rather - # than making a mock message like this. 
- message_id = self.send_stream_message(self.example_user('othello'), - "Denmark", content="@**test**") - message = Message.objects.get(id=message_id) - wide_message_dict = MessageDict.wide_dict(message) - - self.event = { - u'command': '@**test**', - u'message': wide_message_dict, - u'trigger': 'mention', - } - self.bot_user = get_user("[email protected]", get_realm("zulip")) + bot_user = get_user("[email protected]", get_realm("zulip")) service_class = get_service_interface_class('whatever') # GenericOutgoingWebhookService self.handler = service_class(service_name='test-service', token='abcdef', - user_profile=self.bot_user) + user_profile=bot_user) def test_process_success_response(self) -> None: class Stub: @@ -72,19 +69,69 @@ def make_response(text: str) -> requests.Response: self.assertTrue(m.called) def test_build_bot_request(self) -> None: - request_data = self.handler.build_bot_request(self.event) + othello = self.example_user('othello') + stream = get_stream('Denmark', othello.realm) + message_id = self.send_stream_message( + othello, + stream.name, + content="@**test**" + ) + + message = Message.objects.get(id=message_id) + + gravatar_url = get_gravatar_url( + othello.delivery_email, + othello.avatar_version, + ) + + expected_message_data = { + 'avatar_url': gravatar_url, + 'client': 'test suite', + 'content': '@**test**', + 'content_type': 'text/x-markdown', + 'display_recipient': 'Denmark', + 'id': message.id, + 'is_me_message': False, + 'reactions': [], + 'recipient_id': message.recipient_id, + 'rendered_content': '<p>@<strong>test</strong></p>', + 'sender_email': othello.email, + 'sender_full_name': 'Othello, the Moor of Venice', + 'sender_id': othello.id, + 'sender_realm_str': 'zulip', + 'sender_short_name': 'othello', + 'stream_id': stream.id, + TOPIC_NAME: 'test', + 'submessages': [], + 'timestamp': datetime_to_timestamp(message.date_sent), + 'topic_links': [], + 'type': 'stream', + } + + wide_message_dict = MessageDict.wide_dict(message) + + event = { + u'command': '@**test**', + u'message': wide_message_dict, + u'trigger': 'mention', + } + + request_data = self.handler.build_bot_request(event) request_data = json.loads(request_data) self.assertEqual(request_data['data'], "@**test**") self.assertEqual(request_data['token'], "abcdef") - self.assertEqual(request_data['message'], self.event['message']) + self.assertEqual(request_data['message'], expected_message_data) + + # Make sure we didn't accidentally mutate wide_message_dict. 
+ self.assertEqual(wide_message_dict['sender_realm_id'], othello.realm_id) def test_process_success(self) -> None: response = dict(response_not_required=True) # type: Dict[str, Any] - success_response = self.handler.process_success(response, self.event) + success_response = self.handler.process_success(response) self.assertEqual(success_response, None) response = dict(response_string='test_content') - success_response = self.handler.process_success(response, self.event) + success_response = self.handler.process_success(response) self.assertEqual(success_response, dict(content='test_content')) response = dict( @@ -92,7 +139,7 @@ def test_process_success(self) -> None: widget_content='test_widget_content', red_herring='whatever', ) - success_response = self.handler.process_success(response, self.event) + success_response = self.handler.process_success(response) expected_response = dict( content='test_content', widget_content='test_widget_content', @@ -100,7 +147,7 @@ def test_process_success(self) -> None: self.assertEqual(success_response, expected_response) response = dict() - success_response = self.handler.process_success(response, self.event) + success_response = self.handler.process_success(response) self.assertEqual(success_response, None) class TestSlackOutgoingWebhookService(ZulipTestCase): @@ -172,9 +219,9 @@ def test_build_bot_request_private_message(self, mock_fail_with_message: mock.Mo def test_process_success(self) -> None: response = dict(response_not_required=True) # type: Dict[str, Any] - success_response = self.handler.process_success(response, self.stream_message_event) + success_response = self.handler.process_success(response) self.assertEqual(success_response, None) response = dict(text='test_content') - success_response = self.handler.process_success(response, self.stream_message_event) + success_response = self.handler.process_success(response) self.assertEqual(success_response, dict(content='test_content')) diff --git a/zerver/tests/test_outgoing_webhook_system.py b/zerver/tests/test_outgoing_webhook_system.py --- a/zerver/tests/test_outgoing_webhook_system.py +++ b/zerver/tests/test_outgoing_webhook_system.py @@ -7,6 +7,10 @@ from typing import Any, Optional +from zerver.lib.actions import ( + do_create_user, +) + from zerver.lib.outgoing_webhook import ( do_rest_call, GenericOutgoingWebhookService, @@ -15,7 +19,15 @@ from zerver.lib.test_classes import ZulipTestCase from zerver.lib.topic import TOPIC_NAME -from zerver.models import get_realm, get_user, UserProfile, get_display_recipient +from zerver.lib.users import add_service +from zerver.models import ( + get_display_recipient, + get_realm, + get_user, + Recipient, + Service, + UserProfile, +) from version import ZULIP_VERSION @@ -139,36 +151,103 @@ def test_request_exception(self, mock_fail_with_message: mock.Mock, self.assertEqual(bot_owner_notification.recipient_id, self.bot_user.bot_owner.id) class TestOutgoingWebhookMessaging(ZulipTestCase): - def setUp(self) -> None: - super().setUp() - self.user_profile = self.example_user("othello") - self.bot_profile = self.create_test_bot('outgoing-webhook', self.user_profile, - full_name='Outgoing Webhook bot', - bot_type=UserProfile.OUTGOING_WEBHOOK_BOT, - service_name='foo-service') + def create_outgoing_bot(self, bot_owner: UserProfile) -> UserProfile: + return self.create_test_bot( + 'outgoing-webhook', + bot_owner, + full_name='Outgoing Webhook bot', + bot_type=UserProfile.OUTGOING_WEBHOOK_BOT, + service_name='foo-service' + ) + + def test_multiple_services(self) 
-> None: + bot_owner = self.example_user("othello") + + bot = do_create_user( + bot_owner=bot_owner, + bot_type=UserProfile.OUTGOING_WEBHOOK_BOT, + full_name='Outgoing Webhook Bot', + email='whatever', + realm=bot_owner.realm, + short_name='', + password=None, + ) + + add_service( + 'weather', + user_profile=bot, + interface=Service.GENERIC, + base_url='weather_url', + token='weather_token', + ) + + add_service( + 'qotd', + user_profile=bot, + interface=Service.GENERIC, + base_url='qotd_url', + token='qotd_token', + ) + + sender = self.example_user("hamlet") + + with mock.patch('zerver.worker.queue_processors.do_rest_call') as m: + self.send_personal_message( + sender, + bot, + content="some content" + ) + + url_token_tups = set() + for item in m.call_args_list: + args = item[0] + base_url = args[0] + request_data = ujson.loads(args[1]) + tup = (base_url, request_data['token']) + url_token_tups.add(tup) + message_data = request_data['message'] + self.assertEqual(message_data['content'], 'some content') + self.assertEqual(message_data['sender_id'], sender.id) + + self.assertEqual( + url_token_tups, + { + ('weather_url', 'weather_token'), + ('qotd_url', 'qotd_token'), + } + ) @mock.patch('requests.request', return_value=ResponseMock(200, {"response_string": "Hidley ho, I'm a webhook responding!"})) def test_pm_to_outgoing_webhook_bot(self, mock_requests_request: mock.Mock) -> None: - self.send_personal_message(self.user_profile, self.bot_profile, + bot_owner = self.example_user("othello") + bot = self.create_outgoing_bot(bot_owner) + sender = self.example_user("hamlet") + + self.send_personal_message(sender, bot, content="foo") last_message = self.get_last_message() self.assertEqual(last_message.content, "Hidley ho, I'm a webhook responding!") - self.assertEqual(last_message.sender_id, self.bot_profile.id) - display_recipient = get_display_recipient(last_message.recipient) - # The next two lines error on mypy because the display_recipient is of type Union[str, List[Dict[str, Any]]]. - # In this case, we know that display_recipient will be of type List[Dict[str, Any]]. - # Otherwise this test will error, which is wanted behavior anyway. - self.assert_length(display_recipient, 1) # type: ignore - self.assertEqual(display_recipient[0]['email'], self.user_profile.email) # type: ignore + self.assertEqual(last_message.sender_id, bot.id) + self.assertEqual( + last_message.recipient.type_id, + sender.id + ) + self.assertEqual( + last_message.recipient.type, + Recipient.PERSONAL + ) @mock.patch('requests.request', return_value=ResponseMock(200, {"response_string": "Hidley ho, I'm a webhook responding!"})) def test_stream_message_to_outgoing_webhook_bot(self, mock_requests_request: mock.Mock) -> None: - self.send_stream_message(self.user_profile, "Denmark", - content="@**{}** foo".format(self.bot_profile.full_name), + bot_owner = self.example_user("othello") + bot = self.create_outgoing_bot(bot_owner) + + self.send_stream_message(bot_owner, "Denmark", + content="@**{}** foo".format(bot.full_name), topic_name="bar") last_message = self.get_last_message() self.assertEqual(last_message.content, "Hidley ho, I'm a webhook responding!") - self.assertEqual(last_message.sender_id, self.bot_profile.id) + self.assertEqual(last_message.sender_id, bot.id) self.assertEqual(last_message.topic_name(), "bar") display_recipient = get_display_recipient(last_message.recipient) self.assertEqual(display_recipient, "Denmark")
outgoing bots: Prevent bugs with multiple services (sender_realm_id) Thanks to @Udit107710 we discovered that if an outgoing webhook bot supported multiple services, we would get errors when trying to deliver messages to each of them. This was probably never a real-world problem, since our current UI only makes it easy to set up one service per bot, even though the DB theoretically supports multiple services. Also, we never heard of folks in the outside world trying to do this. The root cause of the problem was that `finalize_payload` was mutating the shared dictionary for the first service we called, and then the `sender_realm_id` field would be missing.
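To make the failure mode concrete, here is a minimal, self-contained Python sketch of the pattern described above (simplified names, not Zulip's actual `MessageDict` code): finalizing the shared wide dict in place strips `sender_realm_id` for every later consumer, while returning a shallow copy leaves the shared dict intact.

```python
import copy
from typing import Any, Dict


def finalize_in_place(obj: Dict[str, Any]) -> None:
    # Simplified stand-in for the old behavior: strip wide-format fields
    # from the dict while building a client payload.
    obj.pop('sender_realm_id', None)


def finalize_payload(obj: Dict[str, Any]) -> Dict[str, Any]:
    # Simplified stand-in for the new behavior: work on a shallow copy,
    # so repeated calls (one per service) all start from the wide dict.
    new_obj = copy.copy(obj)
    finalize_in_place(new_obj)
    return new_obj


wide_dict = {'content': 'hi', 'sender_realm_id': 1}

first = finalize_payload(wide_dict)   # payload for the first service
second = finalize_payload(wide_dict)  # the second service still sees the full wide dict
assert 'sender_realm_id' not in first
assert 'sender_realm_id' in wide_dict
```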
2020-03-28T13:26:38
zulip/zulip
14,501
zulip__zulip-14501
[ "14483" ]
c4589718fc60e093109ed627a57233bc911f9d1b
diff --git a/zerver/models.py b/zerver/models.py --- a/zerver/models.py +++ b/zerver/models.py @@ -301,6 +301,10 @@ class Realm(models.Model): upload_quota_gb = models.IntegerField(null=True) # type: Optional[int] VIDEO_CHAT_PROVIDERS = { + 'disabled': { + 'name': u"None", + 'id': 0 + }, 'jitsi_meet': { 'name': u"Jitsi Meet", 'id': 1
diff --git a/frontend_tests/node_tests/compose.js b/frontend_tests/node_tests/compose.js --- a/frontend_tests/node_tests/compose.js +++ b/frontend_tests/node_tests/compose.js @@ -79,6 +79,22 @@ people.small_avatar_url_for_person = function () { return 'http://example.com/example.png'; }; +function stub_out_video_calls() { + const elem = $("#below-compose-content .video_link"); + elem.toggle = (show) => { + if (show) { + elem.show(); + } else { + elem.hide(); + } + }; +} + +function reset_jquery() { + // Avoid leaks. + set_global('$', global.make_zjquery()); +} + const new_user = { email: '[email protected]', user_id: 101, @@ -912,6 +928,29 @@ run_test('initialize', () => { }; }; + page_params.realm_available_video_chat_providers = { + disabled: { + id: 0, + name: "disabled", + }, + jitsi_meet: { + id: 1, + name: "Jitsi Meet", + }, + google_hangouts: { + id: 2, + name: "Google Hangouts", + }, + zoom: { + id: 3, + name: "Zoom", + }, + }; + + page_params.realm_video_chat_provider = + page_params.realm_available_video_chat_providers.disabled.id; + + stub_out_video_calls(); compose.initialize(); assert(resize_watch_manual_resize_checked); @@ -919,11 +958,6 @@ run_test('initialize', () => { assert(!$("#compose #attach_files").hasClass("notdisplayed")); assert(setup_upload_called); - function reset_jquery() { - // Avoid leaks. - set_global('$', global.make_zjquery()); - } - let compose_actions_start_checked; function set_up_compose_start_mock(expected_opts) { @@ -942,6 +976,7 @@ run_test('initialize', () => { page_params.narrow = true; reset_jquery(); + stub_out_video_calls(); set_up_compose_start_mock({}); compose.initialize(); @@ -953,6 +988,7 @@ run_test('initialize', () => { page_params.narrow_topic = 'testing'; reset_jquery(); + stub_out_video_calls(); set_up_compose_start_mock({topic: 'testing'}); compose.initialize(); @@ -964,6 +1000,7 @@ run_test('initialize', () => { $("#compose-send-button").attr('disabled', 'disabled'); reset_jquery(); + stub_out_video_calls(); compose.initialize(); compose.abort_xhr(); @@ -1381,16 +1418,6 @@ run_test('on_events', () => { to_$: () => textarea, }, }; - page_params.realm_available_video_chat_providers = { - google_hangouts: { - id: 2, - name: "Google Hangouts", - }, - zoom: { - id: 3, - name: "Zoom", - }, - }; compose_ui.insert_syntax_and_focus = function (syntax) { syntax_to_insert = syntax; @@ -1401,11 +1428,21 @@ run_test('on_events', () => { $('#compose-textarea').val(''); handler(ev); + assert(!called); + + page_params.realm_video_chat_provider = + page_params.realm_available_video_chat_providers.jitsi_meet.id; + handler(ev); // video link ids consist of 15 random digits let video_link_regex = /\[Click to join video call\]\(https:\/\/meet.jit.si\/\d{15}\)/; assert(video_link_regex.test(syntax_to_insert)); + page_params.jitsi_server_url = null; + called = false; + handler(ev); + assert(!called); + page_params.realm_video_chat_provider = page_params.realm_available_video_chat_providers.google_hangouts.id; page_params.realm_google_hangouts_domain = 'zulip'; @@ -1430,11 +1467,6 @@ run_test('on_events', () => { video_link_regex = /\[Click to join video call\]\(example\.zoom\.com\)/; assert(video_link_regex.test(syntax_to_insert)); - page_params.jitsi_server_url = null; - called = false; - - handler(ev); - assert(!called); }()); (function test_markdown_preview_compose_clicked() { @@ -1672,3 +1704,33 @@ run_test('narrow_button_titles', () => { assert.equal($("#left_bar_compose_stream_button_big").text(), i18n.t("New topic")); 
assert.equal($("#left_bar_compose_private_button_big").text(), i18n.t("New private message")); }); + +run_test('test_video_chat_button_toggle', () => { + reset_jquery(); + stub_out_video_calls(); + + page_params.realm_video_chat_provider = + page_params.realm_available_video_chat_providers.disabled.id; + compose.initialize(); + assert.equal($("#below-compose-content .video_link").visible(), false); + + reset_jquery(); + stub_out_video_calls(); + page_params.realm_video_chat_provider = + page_params.realm_available_video_chat_providers.jitsi_meet.id; + compose.initialize(); + assert.equal($("#below-compose-content .video_link").visible(), false); + + reset_jquery(); + stub_out_video_calls(); + page_params.jitsi_server_url = 'https://meet.jit.si'; + compose.initialize(); + assert.equal($("#below-compose-content .video_link").visible(), true); + + reset_jquery(); + stub_out_video_calls(); + page_params.realm_video_chat_provider = + page_params.realm_available_video_chat_providers.google_hangouts.id; + compose.initialize(); + assert.equal($("#below-compose-content .video_link").visible(), true); +}); diff --git a/frontend_tests/node_tests/dispatch.js b/frontend_tests/node_tests/dispatch.js --- a/frontend_tests/node_tests/dispatch.js +++ b/frontend_tests/node_tests/dispatch.js @@ -50,6 +50,10 @@ set_global('settings_bots', { update_bot_permissions_ui: noop, }); +set_global('compose', { + update_video_chat_button_display: noop, +}); + set_global('settings_exports', { populate_exports_table: function (exports) { return exports; diff --git a/frontend_tests/node_tests/ui_init.js b/frontend_tests/node_tests/ui_init.js --- a/frontend_tests/node_tests/ui_init.js +++ b/frontend_tests/node_tests/ui_init.js @@ -187,6 +187,9 @@ $(".top_left_starred_messages").set_find_results('.count', count_stub); $("#tab_list .stream").length = 0; +compose.compute_show_video_chat_button = () => {}; +$("#below-compose-content .video_link").toggle = () => {}; + run_test('initialize_everything', () => { ui_init.initialize_everything(); }); diff --git a/zerver/tests/test_realm.py b/zerver/tests/test_realm.py --- a/zerver/tests/test_realm.py +++ b/zerver/tests/test_realm.py @@ -498,7 +498,7 @@ def test_invalid_integer_attribute_values(self) -> None: invite_to_stream_policy=10, email_address_visibility=10, message_retention_days=10, - video_chat_provider=0, + video_chat_provider=4, waiting_period_threshold=-10, digest_weekday=10, user_group_edit_policy=10, @@ -533,12 +533,18 @@ def test_change_video_chat_provider(self) -> None: self.assertEqual(get_realm('zulip').video_chat_provider, Realm.VIDEO_CHAT_PROVIDERS['jitsi_meet']['id']) self.login('iago') - invalid_video_chat_provider_value = 0 + invalid_video_chat_provider_value = 4 req = {"video_chat_provider": ujson.dumps(invalid_video_chat_provider_value)} result = self.client_patch('/json/realm', req) self.assert_json_error(result, ("Invalid video_chat_provider {}").format(invalid_video_chat_provider_value)) + req = {"video_chat_provider": ujson.dumps(Realm.VIDEO_CHAT_PROVIDERS['disabled']['id'])} + result = self.client_patch('/json/realm', req) + self.assert_json_success(result) + self.assertEqual(get_realm('zulip').video_chat_provider, + Realm.VIDEO_CHAT_PROVIDERS['disabled']['id']) + req = {"video_chat_provider": ujson.dumps(Realm.VIDEO_CHAT_PROVIDERS['google_hangouts']['id'])} result = self.client_patch('/json/realm', req) self.assert_json_error(result, "Invalid domain: Domain can't be empty.")
Add support for disabling the video call button We got a request to disable Zulip's in-app video call button, which I think is pretty reasonable. This can be implemented in the models.py layer by extending `VIDEO_CHAT_PROVIDERS` with an option for no button (value=0 makes sense for this to me). And then that value should cause the video call button to be hidden in the frontend templates for both message editing and compose. Tagging as a priority since it should be simple and valuable. @sahil839 maybe this is a good issue for you?
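A rough sketch of the suggested approach, assuming (as proposed above) a `disabled` entry with id 0 in `VIDEO_CHAT_PROVIDERS`; this is an illustrative standalone snippet, not the actual `models.py` or frontend change:

```python
# Illustrative only: mirrors the suggestion of a provider entry with id 0.
VIDEO_CHAT_PROVIDERS = {
    'disabled': {'name': "None", 'id': 0},
    'jitsi_meet': {'name': "Jitsi Meet", 'id': 1},
}


def show_video_call_button(realm_video_chat_provider: int) -> bool:
    # The compose and message-edit UIs would hide the button when the realm
    # selects the 'disabled' provider.
    return realm_video_chat_provider != VIDEO_CHAT_PROVIDERS['disabled']['id']


assert show_video_call_button(VIDEO_CHAT_PROVIDERS['jitsi_meet']['id'])
assert not show_video_call_button(VIDEO_CHAT_PROVIDERS['disabled']['id'])
```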
Hello @zulip/server-compose members, this issue was labeled with the "area: compose" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim
2020-04-07T22:38:06
zulip/zulip
14,524
zulip__zulip-14524
[ "14171", "14171" ]
2610d645b417ccb80efea3daf192882aef71fd3e
diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py --- a/zerver/lib/integrations.py +++ b/zerver/lib/integrations.py @@ -327,7 +327,7 @@ def __init__(self, name: str, *args: Any, **kwargs: Any) -> None: WebhookIntegration('pivotal', ['project-management'], display_name='Pivotal Tracker'), WebhookIntegration('raygun', ['monitoring'], display_name="Raygun"), WebhookIntegration('reviewboard', ['version-control'], display_name="ReviewBoard"), - WebhookIntegration('semaphore', ['continuous-integration', 'deployment'], stream_name='builds'), + WebhookIntegration('semaphore', ['continuous-integration', 'deployment']), WebhookIntegration('sentry', ['monitoring']), WebhookIntegration('slack', ['communication']), WebhookIntegration('solano', ['continuous-integration'], display_name='Solano Labs'), diff --git a/zerver/webhooks/semaphore/view.py b/zerver/webhooks/semaphore/view.py --- a/zerver/webhooks/semaphore/view.py +++ b/zerver/webhooks/semaphore/view.py @@ -1,5 +1,7 @@ # Webhooks for external integrations. -from typing import Any, Dict +from typing import Any, Dict, Tuple, Optional + +from urllib.parse import urlparse from django.http import HttpRequest, HttpResponse @@ -9,6 +11,8 @@ from zerver.lib.webhooks.common import check_send_webhook_message from zerver.models import UserProfile +# Semaphore Classic Templates + BUILD_TEMPLATE = """ [Build {build_number}]({build_url}) {status}: * **Commit**: [{commit_hash}: {commit_message}]({commit_url}) @@ -22,13 +26,83 @@ * **Server**: {server_name} """.strip() +# Semaphore 2.0 Templates + +# Currently, Semaphore 2.0 only supports GitHub, while Semaphore Classic +# supports Bitbucket too. The payload does not have URLs for commits, tags, +# pull requests, etc. So, we use separate templates for GitHub and construct +# the URLs ourselves. For any other repository hosting services we use +# templates that don't have any links in them. 
+ +GH_PUSH_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}**: +* **Commit**: [({commit_hash})]({commit_url}) {commit_message} +* **Branch**: {branch_name} +* **Author**: [{author_name}]({author_url}) +""".strip() + +PUSH_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}**: +* **Commit**: ({commit_hash}) {commit_message} +* **Branch**: {branch_name} +* **Author**: {author_name} +""".strip() + +GH_PULL_REQUEST_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}**: +* **Pull Request**: [{pull_request_title}]({pull_request_url}) +* **Branch**: {branch_name} +* **Author**: [{author_name}]({author_url}) +""".strip() + +PULL_REQUEST_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}**: +* **Pull Request**: {pull_request_title} (#{pull_request_number}) +* **Branch**: {branch_name} +* **Author**: {author_name} +""".strip() + +GH_TAG_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}**: +* **Tag**: [{tag_name}]({tag_url}) +* **Author**: [{author_name}]({author_url}) +""".strip() + +TAG_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}**: +* **Tag**: {tag_name} +* **Author**: {author_name} +""".strip() + +DEFAULT_TEMPLATE = """ +[{pipeline_name}]({workflow_url}) pipeline **{pipeline_result}** for {event_name} event +""".strip() + TOPIC_TEMPLATE = "{project}/{branch}" +GITHUB_URL_TEMPLATES = { + 'commit': '{repo_url}/commit/{commit_id}', + 'pull_request': '{repo_url}/pull/{pr_number}', + 'tag': '{repo_url}/tree/{tag_name}', + 'user': 'https://github.com/{username}', +} + + @api_key_only_webhook_view('Semaphore') @has_request_variables def api_semaphore_webhook(request: HttpRequest, user_profile: UserProfile, payload: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse: + content, project_name, branch_name = ( + semaphore_classic(payload) if 'event' in payload else semaphore_2(payload) + ) + subject = ( + TOPIC_TEMPLATE.format(project=project_name, branch=branch_name) if branch_name else project_name + ) + check_send_webhook_message(request, user_profile, subject, content) + return json_success() + +def semaphore_classic(payload: Dict[str, Any]) -> Tuple[str, str, str]: # semaphore only gives the last commit, even if there were multiple commits # since the last build branch_name = payload["branch_name"] @@ -76,10 +150,65 @@ def api_semaphore_webhook(request: HttpRequest, user_profile: UserProfile, content = "{event}: {result}".format( event=event, result=result) - subject = TOPIC_TEMPLATE.format( - project=project_name, - branch=branch_name + return content, project_name, branch_name + +def semaphore_2(payload: Dict[str, Any]) -> Tuple[str, str, Optional[str]]: + repo_url = payload["repository"]["url"] + project_name = payload["project"]["name"] + organization_name = payload["organization"]["name"] + author_name = payload["revision"]["sender"]["login"] + workflow_id = payload['workflow']['id'] + context = dict( + author_name=author_name, + author_url=GITHUB_URL_TEMPLATES['user'].format(repo_url=repo_url, username=author_name), + pipeline_name=payload["pipeline"]["name"], + pipeline_result=payload["pipeline"]["result"], + workflow_url='https://{org}.semaphoreci.com/workflows/{id}'.format( + org=organization_name, id=workflow_id) ) - check_send_webhook_message(request, user_profile, subject, content) - return json_success() + if payload["revision"]["reference_type"] == "branch": # push event + commit_id = 
payload["revision"]["commit_sha"] + branch_name = payload["revision"]["branch"]["name"] + context.update( + branch_name=branch_name, + commit_id=commit_id, + commit_hash=commit_id[:7], + commit_message=payload["revision"]["commit_message"], + commit_url=GITHUB_URL_TEMPLATES['commit'].format(repo_url=repo_url, commit_id=commit_id), + ) + template = GH_PUSH_TEMPLATE if is_github_repo(repo_url) else PUSH_TEMPLATE + content = template.format(**context) + elif payload["revision"]["reference_type"] == "pull_request": + pull_request = payload["revision"]["pull_request"] + branch_name = pull_request["branch_name"] + pull_request_title = pull_request["name"] + pull_request_number = pull_request["number"] + pull_request_url = GITHUB_URL_TEMPLATES['pull_request'].format( + repo_url=repo_url, pr_number=pull_request_number) + context.update( + branch_name=branch_name, + pull_request_title=pull_request_title, + pull_request_url=pull_request_url, + pull_request_number=pull_request_number, + ) + template = GH_PULL_REQUEST_TEMPLATE if is_github_repo(repo_url) else PULL_REQUEST_TEMPLATE + content = template.format(**context) + elif payload["revision"]["reference_type"] == "tag": + branch_name = '' + tag_name = payload["revision"]["tag"]["name"] + tag_url = GITHUB_URL_TEMPLATES['tag'].format(repo_url=repo_url, tag_name=tag_name) + context.update( + tag_name=tag_name, + tag_url=tag_url, + ) + template = GH_TAG_TEMPLATE if is_github_repo(repo_url) else TAG_TEMPLATE + content = template.format(**context) + else: # should never get here: unknown event + branch_name = '' + context.update(event_name=payload["revision"]["reference_type"]) + content = DEFAULT_TEMPLATE.format(**context) + return content, project_name, branch_name + +def is_github_repo(repo_url: str) -> bool: + return urlparse(repo_url).hostname == 'github.com'
diff --git a/zerver/webhooks/semaphore/tests.py b/zerver/webhooks/semaphore/tests.py --- a/zerver/webhooks/semaphore/tests.py +++ b/zerver/webhooks/semaphore/tests.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import ujson +from mock import patch from zerver.lib.test_classes import WebhookTestCase @@ -10,6 +12,7 @@ class SemaphoreHookTests(WebhookTestCase): # contain information on the repo and branch, and the message has links and # details about the build, deploy, server, author, and commit + # Tests for Semaphore Classic def test_semaphore_build(self) -> None: expected_topic = u"knighthood/master" # repo/branch expected_message = """ @@ -31,5 +34,89 @@ def test_semaphore_deploy(self) -> None: self.send_and_test_stream_message('deploy', expected_topic, expected_message, content_type="application/x-www-form-urlencoded") + # Tests For Semaphore 2.0 + + def test_semaphore2_push(self) -> None: + expected_topic = u"notifications/rw/webhook_impl" # repo/branch + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/acabe58e-4bcc-4d39-be06-e98d71917703) pipeline **stopped**: +* **Commit**: [(2d9f5fc)](https://github.com/renderedtext/notifications/commit/2d9f5fcec1ca7c68fa7bd44dd58ec4ff65814563) Implement webhooks for SemaphoreCI +* **Branch**: rw/webhook_impl +* **Author**: [radwo](https://github.com/radwo) +""".strip() + self.send_and_test_stream_message('push', expected_topic, expected_message, + content_type="application/json") + + def test_semaphore2_push_non_gh_repo(self) -> None: + expected_topic = u"notifications/rw/webhook_impl" # repo/branch + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/acabe58e-4bcc-4d39-be06-e98d71917703) pipeline **stopped**: +* **Commit**: (2d9f5fc) Implement webhooks for SemaphoreCI +* **Branch**: rw/webhook_impl +* **Author**: radwo +""".strip() + with patch('zerver.webhooks.semaphore.view.is_github_repo', return_value=False): + self.send_and_test_stream_message('push', expected_topic, expected_message, + content_type="application/json") + + def test_semaphore_pull_request(self) -> None: + expected_topic = u"notifications/test-notifications" + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/84383f37-d025-4811-b719-61c6acc92a1e) pipeline **failed**: +* **Pull Request**: [Testing PR notifications](https://github.com/renderedtext/notifications/pull/3) +* **Branch**: test-notifications +* **Author**: [radwo](https://github.com/radwo) +""".strip() + self.send_and_test_stream_message('pull_request', expected_topic, expected_message, + content_type="application/json") + + def test_semaphore_pull_request_non_gh_repo(self) -> None: + expected_topic = u"notifications/test-notifications" + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/84383f37-d025-4811-b719-61c6acc92a1e) pipeline **failed**: +* **Pull Request**: Testing PR notifications (#3) +* **Branch**: test-notifications +* **Author**: radwo +""".strip() + with patch('zerver.webhooks.semaphore.view.is_github_repo', return_value=False): + self.send_and_test_stream_message('pull_request', expected_topic, expected_message, + content_type="application/json") + + def test_semaphore_tag(self) -> None: + expected_topic = u"notifications" + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/a8704319-2422-4828-9b11-6b2afa3554e6) pipeline **stopped**: +* **Tag**: [v1.0.1](https://github.com/renderedtext/notifications/tree/v1.0.1) +* **Author**: 
[radwo](https://github.com/radwo) +""".strip() + self.send_and_test_stream_message('tag', expected_topic, expected_message, + content_type="application/json") + + def test_semaphore_tag_non_gh_repo(self) -> None: + expected_topic = u"notifications" + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/a8704319-2422-4828-9b11-6b2afa3554e6) pipeline **stopped**: +* **Tag**: v1.0.1 +* **Author**: radwo +""".strip() + with patch('zerver.webhooks.semaphore.view.is_github_repo', return_value=False): + self.send_and_test_stream_message('tag', expected_topic, expected_message, + content_type="application/json") + + def test_semaphore_unknown_event(self) -> None: + expected_topic = u"notifications" + expected_message = """ +[Notifications](https://semaphore.semaphoreci.com/workflows/a8704319-2422-4828-9b11-6b2afa3554e6) pipeline **stopped** for unknown event +""".strip() + with patch('zerver.webhooks.semaphore.tests.SemaphoreHookTests.get_body', self.get_unknown_event): + self.send_and_test_stream_message('tag', expected_topic, expected_message, + content_type="application/json") + def get_body(self, fixture_name: str) -> str: return self.webhook_fixture_data("semaphore", fixture_name, file_type="json") + + def get_unknown_event(self, fixture_name: str) -> str: + """Return modified payload with revision.reference_type changed""" + fixture_data = ujson.loads(self.webhook_fixture_data("semaphore", fixture_name, file_type="json")) + fixture_data['revision']['reference_type'] = 'unknown' + return fixture_data
Semaphore CI's webhook notifications don't work It looks like the payload format for the webhook notifications has changed. The old format described [here](https://semaphoreci.com/docs/branches-and-builds-api.html) is expected by the webhook code. The test fixtures also have payloads that look very similar to what has been described here. The new payload format is documented [here](https://docs.semaphoreci.com/essentials/webhook-notifications/#notification-payload)
Hello @zulip/server-integrations members, this issue was labeled with the "area: integrations" label, so you may want to check it out! <!-- areaLabelAddition --> @zulipbot claim Welcome to Zulip, @abhinav824! We just sent you an invite to collaborate on this repository at https://github.com/zulip/zulip/invitations. Please accept this invite in order to claim this issue and begin a fun, rewarding experience contributing to Zulip! Here's some tips to get you off to a good start: * Join me on the [Zulip developers' server](https://chat.zulip.org), to get help, chat about this issue, and meet the other developers. * [Unwatch this repository](https://help.github.com/articles/unwatching-repositories/), so that you don't get 100 emails a day. As you work on this issue, you'll also want to refer to the [Zulip code contribution guide](https://zulip.readthedocs.io/en/latest/contributing/index.html), as well as the rest of the developer documentation on that site. See you on the other side (that is, the pull request side)! The notification payload seems less useful than what it was before - links to the commit/changes, and a link to the build are missing from the payload. :see_no_evil: I've also done some research on this and i want to mention a few things : 1. There are two versions of semaphore - **Semaphore classic** and **Semaphore 2.0**, which are operating as two distinct products. Have a look on this [page](https://docs.semaphoreci.com/guided-tour/migration-guide-for-semaphore-classic-users/#semaphore-classic-and-semaphore-20-are-distinct-products). 1. So, our existing code should work for semaphore classic (well, i'm not sure whether it's working or not) and we need to extend our support for semaphore 2.0. (The old payload corresponds to semaphore classic and the new one is for Semaphore 2.0). 1. For extending our support to semaphore 2.0, i think we need to understand the basic working [concept](https://docs.semaphoreci.com/guided-tour/concepts/#concepts) behind it (I believe it's different from the classic one). Maybe, then we'll be able to extract the useful information from the new notification payload and send a notification. 1. And yes, i also think that the new payload is somewhat less useful. Anyway we can generate commit links using the commit id's but, maybe we cannot generate link to the build. @punchagan I have two kinds of approach to this issue. We can handle both the payloads in the existing view and trigger the notification according to the payload we'll receive or we can have a separate integration for Semaphore 2.0. So, how should i proceed now ? From a user's perspective, it might be best to just have a single integration and do the right thing in the webhook code. But, the help instructions in the `/integrations` page would probably be slightly different for the two versions, since the UI in Semaphore would be different, and the notification in Zulip itself would be slightly different because the payloads are different. Following from the Bitbucket example, having a separate integration might be the way to go.
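For reference, the patch above ends up taking the single-integration route and distinguishes the two payload formats by shape; a simplified standalone sketch of that dispatch (with made-up payload values) is:

```python
from typing import Any, Dict


def semaphore_payload_kind(payload: Dict[str, Any]) -> str:
    # Semaphore Classic payloads carry an "event" key ("build"/"deploy"),
    # while Semaphore 2.0 payloads describe a pipeline/workflow instead.
    if 'event' in payload:
        return 'classic'
    return 'semaphore_2'


assert semaphore_payload_kind({'event': 'build', 'result': 'passed'}) == 'classic'
assert semaphore_payload_kind({'pipeline': {'name': 'CI'}, 'workflow': {'id': 'abc'}}) == 'semaphore_2'
```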
2020-04-09T17:29:26
zulip/zulip
14,550
zulip__zulip-14550
[ "14405" ]
ca0154c42547dfb4319c1534295b74635cc6cf82
diff --git a/zerver/views/messages.py b/zerver/views/messages.py --- a/zerver/views/messages.py +++ b/zerver/views/messages.py @@ -48,7 +48,8 @@ from zerver.lib.utils import statsd from zerver.lib.validator import \ check_list, check_int, check_dict, check_string, check_bool, \ - check_string_or_int_list, check_string_or_int, check_string_in + check_string_or_int_list, check_string_or_int, check_string_in, \ + check_required_string from zerver.lib.zephyr import compute_mit_user_fullname from zerver.models import Message, UserProfile, Stream, Subscription, Client,\ Realm, RealmDomain, Recipient, UserMessage, \ @@ -541,12 +542,15 @@ def convert_term(elem: Union[Dict[str, Any], List[str]]) -> Dict[str, Any]: # operators_supporting_id, or operators_supporting_ids array. operators_supporting_id = ['sender', 'group-pm-with', 'stream'] operators_supporting_ids = ['pm-with'] + operators_non_empty_operand = {'search'} operator = elem.get('operator', '') if operator in operators_supporting_id: operand_validator = check_string_or_int elif operator in operators_supporting_ids: operand_validator = check_string_or_int_list + elif operator in operators_non_empty_operand: + operand_validator = check_required_string else: operand_validator = check_string
diff --git a/zerver/tests/test_narrow.py b/zerver/tests/test_narrow.py --- a/zerver/tests/test_narrow.py +++ b/zerver/tests/test_narrow.py @@ -2347,6 +2347,10 @@ def test_invalid_narrow_operand_in_dict(self) -> None: for operand in ['is', 'near', 'has', 'id']: self.exercise_bad_narrow_operand_using_dict_api(operand, invalid_operands, error_msg) + # Disallow empty search terms + error_msg = 'elem["operand"] cannot be blank.' + self.exercise_bad_narrow_operand_using_dict_api('search', [''], error_msg) + # The exercise_bad_narrow_operand helper method uses legacy tuple format to # test bad narrow, this method uses the current dict api format def exercise_bad_narrow_operand_using_dict_api(self, operator: str,
Empty search query in /json/messages endpoint causes 500 internal server error. Sending an empty search query (not possible directly from the web app UI) causes a 500 internal server error. The endpoint is json/messages and the query string is: ``` anchor=newest num_before=50 num_after=50 narrow=[{"negated":false,"operator":"search","operand":""}] client_gravatar=true ``` and the error stacktrace is ``` 2020-03-30 18:36:06.228 INFO [zr] 172.17.0.1 GET 500 49ms (db: 34ms/3q) /json/messages [streams,search] ([email protected] via website) 2020-03-30 18:36:06.228 ERR [django.request] Internal Server Error: /json/messages 2020-03-30 18:36:06.229 ERR [django.server] "GET /json/messages?anchor=newest&num_before=50&num_after=50&narrow=[{%22negated%22:true,%22operator%22:%22streams%22,%22operand%22:%22public%22},{%22negated%22:false,%22operator%22:%22search%22,%22operand%22:%22%22}]&client_gravatar=true HTTP/1.1" 500 49 2020-03-30 18:36:07.830 ERR [] Traceback (most recent call last): File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context cursor, statement, parameters, context File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 588, in do_execute cursor.execute(statement, parameters) File "/srv/zulip/zerver/lib/db.py", line 31, in execute return wrapper_execute(self, super().execute, query, vars) File "/srv/zulip/zerver/lib/db.py", line 18, in wrapper_execute return action(sql, params) psycopg2.errors.SystemError: pgroonga: query_extract_keywords: failed to parse expression: Syntax error: <||> The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/srv/zulip-py3-venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 113, in _get_response response = wrapped_callback(request, *callback_args, **callback_kwargs) File "/srv/zulip/zerver/lib/rest.py", line 27, in _wrapped_view_func response = view_func(request, *args, **kwargs) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/django/views/decorators/csrf.py", line 54, in wrapped_view return view_func(*args, **kwargs) File "/srv/zulip/zerver/lib/rest.py", line 166, in rest_dispatch return target_function(request, **kwargs) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/django/utils/decorators.py", line 142, in _wrapped_view response = view_func(request, *args, **kwargs) File "/srv/zulip/zerver/decorator.py", line 677, in _wrapped_view_func return authenticate_log_and_execute_json(request, view_func, *args, **kwargs) File "/srv/zulip/zerver/decorator.py", line 656, in authenticate_log_and_execute_json return limited_view_func(request, user_profile, *args, **kwargs) File "/srv/zulip/zerver/decorator.py", line 796, in wrapped_func return func(request, *args, **kwargs) File "/srv/zulip/zerver/lib/request.py", line 368, in _wrapped_view_func return view_func(request, *args, **kwargs) File "/srv/zulip/zerver/views/messages.py", line 903, in get_messages_backend rows = list(sa_conn.execute(query).fetchall()) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 982, in execute return meth(self, multiparams, params) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/sql/elements.py", line 293, in _execute_on_connection return connection._execute_clauseelement(self, multiparams, params) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1101, in _execute_clauseelement distilled_params, File 
"/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1250, in _execute_context e, statement, parameters, cursor, context File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1476, in _handle_dbapi_exception util.raise_from_cause(sqlalchemy_exception, exc_info) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/util/compat.py", line 398, in raise_from_cause reraise(type(exception), exception, tb=exc_tb, cause=cause) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/util/compat.py", line 152, in reraise raise value.with_traceback(tb) File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/base.py", line 1246, in _execute_context cursor, statement, parameters, context File "/srv/zulip-py3-venv/lib/python3.6/site-packages/sqlalchemy/engine/default.py", line 588, in do_execute cursor.execute(statement, parameters) File "/srv/zulip/zerver/lib/db.py", line 31, in execute return wrapper_execute(self, super().execute, query, vars) File "/srv/zulip/zerver/lib/db.py", line 18, in wrapper_execute return action(sql, params) sqlalchemy.exc.OperationalError: (psycopg2.errors.SystemError) pgroonga: query_extract_keywords: failed to parse expression: Syntax error: <||> [SQL: SELECT /* get_messages */ anon_1.message_id, anon_1.flags, anon_1.subject, anon_1.rendered_content, anon_1.content_matches, anon_1.topic_matches FROM (SELECT message_id, flags, subject, rendered_content, pgroonga_match_positions_character(rendered_content, pgroonga_query_extract_keywords(escape_html(%(escape_html_1)s))) AS content_matches, pgroonga_match_positions_character(escape_html(subject), pgroonga_query_extract_keywords(escape_html(%(escape_html_1)s))) AS topic_matches FROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id WHERE user_profile_id = %(param_1)s AND recipient_id NOT IN (%(recipient_id_1)s, %(recipient_id_2)s, %(recipient_id_3)s, %(recipient_id_4)s, %(recipient_id_5)s, %(recipient_id_6)s, %(recipient_id_7)s, %(recipient_id_8)s, %(recipient_id_9)s, %(recipient_id_10)s, %(recipient_id_11)s, %(recipient_id_12)s, %(recipient_id_13)s, %(recipient_id_14)s) AND (search_pgroonga &@~ escape_html(%(escape_html_1)s)) ORDER BY message_id DESC LIMIT %(param_2)s) AS anon_1 ORDER BY message_id ASC] [parameters: {'escape_html_1': '', 'param_1': 10, 'recipient_id_1': 18, 'recipient_id_2': 19, 'recipient_id_3': 20, 'recipient_id_4': 21, 'recipient_id_5': 22, 'recipient_id_6': 26, 'recipient_id_7': 27, 'recipient_id_8': 28, 'recipient_id_9': 29, 'recipient_id_10': 30, 'recipient_id_11': 31, 'recipient_id_12': 32, 'recipient_id_13': 33, 'recipient_id_14': 34, 'param_2': 50}] (Background on this error at: http://sqlalche.me/e/e3q8) ``` on the current master (35b444d59cb00ba2ca2973a5f7fef42f844ab445).
This might be a bug just impacting the PGroonga search backend, based on the traceback, but it probably isn't -- we should be rejecting the `''` in the `by_search` codepath before we get to the search backend code. Hello @zulip/server-search members, this issue was labeled with the "area: search" label, so you may want to check it out! <!-- areaLabelAddition --> Tagging as a priority since all known 500 errors are. Ah, `pgroonga_query_extract_keywords()` doesn't support empty string as input for now. I'll improve `pgroonga_query_extract_keywords()` to accept it later but please ignore empty string in Zulip.
2020-04-11T15:49:14
zulip/zulip
14,591
zulip__zulip-14591
[ "14492" ]
a552c2e5f9bedf1f4d5a2af57fe9d91833d1eb9a
diff --git a/zerver/lib/topic.py b/zerver/lib/topic.py --- a/zerver/lib/topic.py +++ b/zerver/lib/topic.py @@ -1,8 +1,5 @@ -import datetime - from django.db import connection from django.db.models.query import QuerySet, Q -from django.utils.timezone import now as timezone_now from sqlalchemy.sql import ( column, @@ -124,15 +121,7 @@ def update_messages_for_topic_edit(message: Message, new_stream: Optional[Stream]) -> List[Message]: propagate_query = Q(recipient = message.recipient, subject = orig_topic_name) if propagate_mode == 'change_all': - # We only change messages up to 7 days in the past, to avoid hammering our - # DB by changing an unbounded amount of messages - # - # TODO: Look at removing this restriction and/or add a "change_last_week" - # option; this behavior feels buggy. - before_bound = timezone_now() - datetime.timedelta(days=7) - - propagate_query = (propagate_query & ~Q(id = message.id) & - Q(date_sent__range=(before_bound, timezone_now()))) + propagate_query = propagate_query & ~Q(id = message.id) if propagate_mode == 'change_later': propagate_query = propagate_query & Q(id__gt = message.id)
Remove the TODO "7 days" restriction for edit and move topics Right now we have a restriction that only moves messages from the last week, in the method `update_messages_for_topic_edit` in `zerver/lib/topic.py`:
```
# We only change messages up to 7 days in the past, to avoid hammering our
# DB by changing an unbounded amount of messages
#
# TODO: Look at removing this restriction and/or add a "change_last_week"
# option; this behavior feels buggy.
```
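As a toy illustration of the behavior change (plain Python filtering over dicts, standing in for the ORM query in the diff above): with the old restriction only messages from the last 7 days are selected for `change_all`, and with the restriction removed every message in the topic is.

```python
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List


def messages_to_move(messages: List[Dict[str, Any]], week_limit: bool) -> List[Dict[str, Any]]:
    # Toy stand-in for the propagate_query: the old code added a
    # date_sent range filter, the new code does not.
    if not week_limit:
        return list(messages)
    cutoff = datetime.now(timezone.utc) - timedelta(days=7)
    return [m for m in messages if m['date_sent'] >= cutoff]


old_msg = {'id': 1, 'date_sent': datetime.now(timezone.utc) - timedelta(days=30)}
new_msg = {'id': 2, 'date_sent': datetime.now(timezone.utc)}

assert messages_to_move([old_msg, new_msg], week_limit=True) == [new_msg]
assert messages_to_move([old_msg, new_msg], week_limit=False) == [old_msg, new_msg]
```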
Hello @zulip/server-message-view members, this issue was labeled with the "area: message-editing" label, so you may want to check it out! <!-- areaLabelAddition --> Tagging this as a release blocker to make sure we look at it before releasing with #13912. @zulipbot claim
2020-04-15T19:21:38
zulip/zulip
14,678
zulip__zulip-14678
[ "14595" ]
10d93ae1b77ffbd20d2d2b41538b17c91f82ce37
diff --git a/zerver/lib/logging_util.py b/zerver/lib/logging_util.py --- a/zerver/lib/logging_util.py +++ b/zerver/lib/logging_util.py @@ -119,7 +119,7 @@ def skip_200_and_304(record: logging.LogRecord) -> bool: # Apparently, `status_code` is added by Django and is not an actual # attribute of LogRecord; as a result, mypy throws an error if we # access the `status_code` attribute directly. - if getattr(record, 'status_code') in [200, 304]: + if getattr(record, 'status_code', None) in [200, 304]: return False return True
AttributeError: 'LogRecord' object has no attribute 'status_code' I have a development environment with the latest Git version. After performing many requests, I get blocked because of rate limiting. Then, the following error is logged in the console: ``` ---------------------------------------- Exception happened during processing of request from ('127.0.0.1', 56444) 2020-04-16 11:35:49.159 INFO [zr] 127.0.0.1 POST 429 65ms (mem: 57ms/4) (+start: 24ms) /json/messages (10@zulip via website) 2020-04-16 11:35:49.160 INFO [zr] status=429, data=b'{"result":"error","msg":"API usage exceeded rate limit","retry-after":2.6131470203}\n', uid=10@zulip 2020-04-16 11:35:49.162 INFO [zr] 127.0.0.1 POST 429 11ms (mem: 7ms/2) /json/messages (10@zulip via website) 2020-04-16 11:35:49.162 WARN [django.server] "POST /json/messages HTTP/1.1" 429 84 2020-04-16 11:35:49.173 INFO [zr] status=429, data=b'{"result":"error","msg":"API usage exceeded rate limit","retry-after":2.6109778881}\n', uid=10@zulip 2020-04-16 11:35:49.179 INFO [zr] 127.0.0.1 POST 429 20ms (+start: 51ms) /json/messages (10@zulip via website) 2020-04-16 11:35:49.182 WARN [django.server] "POST /json/messages HTTP/1.1" 429 84 2020-04-16 11:35:49.195 INFO [zr] status=429, data=b'{"result":"error","msg":"API usage exceeded rate limit","retry-after":2.5940015316}\n', uid=10@zulip Traceback (most recent call last): File "/usr/lib/python3.7/socketserver.py", line 650, in process_request_thread self.finish_request(request, client_address) File "/usr/lib/python3.7/socketserver.py", line 360, in finish_request self.RequestHandlerClass(request, client_address, self) File "/usr/lib/python3.7/socketserver.py", line 720, in __init__ self.handle() File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py", line 171, in handle self.handle_one_request() File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py", line 187, in handle_one_request if not self.parse_request(): # An error code has been sent, just exit File "/usr/lib/python3.7/http/server.py", line 322, in parse_request "Bad request syntax (%r)" % requestline) File "/usr/lib/python3.7/http/server.py", line 456, in send_error self.log_error("code %d, message %s", code, message) File "/usr/lib/python3.7/http/server.py", line 558, in log_error self.log_message(format, *args) File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py", line 154, in log_message level(format, *args, extra=extra) File "/usr/lib/python3.7/logging/__init__.py", line 1383, in info self._log(INFO, msg, args, **kwargs) File "/usr/lib/python3.7/logging/__init__.py", line 1519, in _log self.handle(record) File "/usr/lib/python3.7/logging/__init__.py", line 1528, in handle if (not self.disabled) and self.filter(record): File "/usr/lib/python3.7/logging/__init__.py", line 762, in filter result = f.filter(record) File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/utils/log.py", line 147, in filter if self.callback(record): File "/home/sjoerd/zulip/zerver/lib/logging_util.py", line 122, in skip_200_and_304 if getattr(record, 'status_code') in [200, 304]: AttributeError: 'LogRecord' object has no attribute 'status_code' ---------------------------------------- ``` Normally, [http.server logs request, status code, size](https://github.com/python/cpython/blob/master/Lib/http/server.py#L544-L545), and [Django extracts the status code from that](https://github.com/django/django/blob/master/django/core/servers/basehttp.py#L144-L157). 
However, [on errors http.server logs code and message](https://github.com/python/cpython/blob/master/Lib/http/server.py#L457) and Django doesn't extract the status code. Parsing arguments to log messages seems pretty fragile to me, so maybe it's better to accept that there isn't always a status code on a log record. Making `getattr` default to `None` in [`skip_200_and_304`](https://github.com/zulip/zulip/blob/master/zerver/lib/logging_util.py#L122) is probably the best option.
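That suggested fix is what the patch does; a standalone sketch of the filter's behavior with and without a `status_code` attribute (using bare `LogRecord` objects rather than Django's logging configuration):

```python
import logging


def skip_200_and_304(record: logging.LogRecord) -> bool:
    # With a default of None, records that never got a status_code attribute
    # (e.g. "Bad request syntax" lines) pass through instead of raising
    # AttributeError inside the logging filter.
    if getattr(record, 'status_code', None) in [200, 304]:
        return False
    return True


bad_syntax = logging.LogRecord('django.server', logging.INFO, __file__, 1,
                               "code 400, message Bad request syntax ('a')", None, None)
assert skip_200_and_304(bad_syntax)  # kept, no crash

ok = logging.LogRecord('django.server', logging.INFO, __file__, 1,
                       '"GET / HTTP/1.1" 200', None, None)
ok.status_code = 200
assert not skip_200_and_304(ok)  # filtered out
```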
This is likely a regression in the rate-limiter infrastructure rewrite we merged yesterday. @mateuszmandera FYI. Hello @zulip/server-production members, this issue was labeled with the "area: production" label, so you may want to check it out! <!-- areaLabelAddition --> @Sjord Could you provide steps to reproduce this reliably? I'm not able to reproduce so far - I'm hitting the rate limits and things are getting logged as expected. It turns out the rate limiting was a red herring. Sorry about that. The actual trigger is requests with invalid syntax. The following curl command triggers this exception in the developer console for me:
2020-04-21T11:04:25
zulip/zulip
14,742
zulip__zulip-14742
[ "14644", "14701" ]
5e01a0ae8b0239e59a176ec498302475212661f8
diff --git a/scripts/lib/node_cache.py b/scripts/lib/node_cache.py --- a/scripts/lib/node_cache.py +++ b/scripts/lib/node_cache.py @@ -3,7 +3,7 @@ import json import shutil -from typing import Optional, List, IO, Any +from typing import Optional, List from scripts.lib.zulip_tools import subprocess_text_output, run ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) @@ -49,8 +49,6 @@ def generate_sha1sum_node_modules( def setup_node_modules( production: bool = DEFAULT_PRODUCTION, - stdout: Optional[IO[Any]] = None, - stderr: Optional[IO[Any]] = None, prefer_offline: bool = False, ) -> None: yarn_args = get_yarn_args(production=production) @@ -64,9 +62,7 @@ def setup_node_modules( if not os.path.exists(success_stamp): do_yarn_install(target_path, yarn_args, - success_stamp, - stdout=stdout, - stderr=stderr) + success_stamp) print("Using cached node modules from %s" % (cached_node_modules,)) if os.path.islink('node_modules'): @@ -78,9 +74,7 @@ def setup_node_modules( def do_yarn_install( target_path: str, yarn_args: List[str], - success_stamp: str, - stdout: Optional[IO[Any]] = None, - stderr: Optional[IO[Any]] = None, + success_stamp: str ) -> None: os.makedirs(target_path, exist_ok=True) shutil.copy('package.json', target_path) @@ -93,9 +87,8 @@ def do_yarn_install( if os.path.exists("node_modules") and not os.path.exists(cached_node_modules): shutil.copytree("node_modules/", cached_node_modules, symlinks=True) if os.environ.get('CUSTOM_CA_CERTIFICATES'): - run([YARN_BIN, "config", "set", "cafile", os.environ['CUSTOM_CA_CERTIFICATES']], - stdout=stdout, stderr=stderr) + run([YARN_BIN, "config", "set", "cafile", os.environ['CUSTOM_CA_CERTIFICATES']]) run([YARN_BIN, "install", "--non-interactive", "--frozen-lockfile"] + yarn_args, - cwd=target_path, stdout=stdout, stderr=stderr) + cwd=target_path) with open(success_stamp, 'w'): pass
upgrade-zulip-from-git: Provide useful error messages when `update-prod-static` fails

In https://github.com/zulip/zulip/issues/14615, we have `upgrade-zulip-from-git` failing inside `update-prod-static`, and the error message is terrible:

```
2020-04-17 21:23:41,590 upgrade-zulip-stage-2: Building static assets...
Cached version not found! Installing node modules.
+ /srv/zulip-yarn/bin/yarn install --non-interactive --frozen-lockfile --prod
Error running a subcommand of ./tools/update-prod-static: /srv/zulip-yarn/bin/yarn install --non-interactive --frozen-lockfile --prod
Actual error output for the subcommand is just above this.
Traceback (most recent call last):
  File "./tools/update-prod-static", line 37, in <module>
    setup_node_modules(production=True, stdout=fp, stderr=fp)
  File "./tools/../scripts/lib/node_cache.py", line 66, in setup_node_modules
    stderr=stderr)
  File "./tools/../scripts/lib/node_cache.py", line 91, in do_yarn_install
    cwd=target_path, stdout=stdout, stderr=stderr)
  File "./tools/../scripts/lib/zulip_tools.py", line 200, in run
    subprocess.check_call(args, **kwargs)
  File "/usr/lib/python3.5/subprocess.py", line 581, in check_call
    raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['/srv/zulip-yarn/bin/yarn', 'install', '--non-interactive', '--frozen-lockfile', '--prod']' returned non-zero exit status 1
Traceback (most recent call last):
  File "/home/zulip/deployments/2020-04-18-01-53-13/scripts/lib/upgrade-zulip-stage-2", line 122, in <module>
    preexec_fn=su_to_zulip)
  File "/usr/lib/python3.5/subprocess.py", line 581, in check_call
    raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['./tools/update-prod-static', '--authors-not-required', '--prev-deploy', '/home/zulip/deployments/current']' returned non-zero exit status 1
Traceback (most recent call last):
  File "/home/zulip/deployments/current/scripts/lib/upgrade-zulip-from-git", line 69, in <module>
    deploy_path, "--from-git"] + deploy_options)
  File "/usr/lib/python3.5/subprocess.py", line 581, in check_call
    raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['/home/zulip/deployments/2020-04-18-01-53-13/scripts/lib/upgrade-zulip-stage-2', '/home/zulip/deployments/2020-04-18-01-53-13', '--from-git']' returned non-zero exit status 1
```

Because `update-prod-static` is failing, the actual error messages are in `/home/zulip/deployments/next/var/log/update-prod-static.log`. `update-prod-static` itself has very verbose output, so I think there are two paths forward:

* Simplest is to copy `build-release-tarball` in at least printing the path to `update-prod-static.log`, and perhaps catching and not emitting much of that unhelpful traceback.
* Better would be to make `update-prod-static` have more sane output.

upgrade-zulip-from-git: Provide useful error messages when `update-prod-static` fails

This just prints the path to the prod static log file.
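A sketch of the first option, as it might look in `upgrade-zulip-stage-2` (the paths, flags, and logging calls here are illustrative, not the exact script):

```python
import logging
import os
import subprocess

# Illustrative paths; the real script derives these from the deployment
# directory it is upgrading into.
deploy_path = "/home/zulip/deployments/next"
log_path = os.path.join(deploy_path, "var/log/update-prod-static.log")

try:
    subprocess.check_call(
        ["./tools/update-prod-static", "--prev-deploy",
         "/home/zulip/deployments/current"],
        cwd=deploy_path,
    )
except subprocess.CalledProcessError:
    # Point the admin at the full build log instead of only re-raising a
    # multi-layer traceback of CalledProcessError wrappers.
    logging.error("Building static assets failed.")
    logging.error("Please check %s for the complete error output.", log_path)
    raise
```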
Hello @zulip/server-production members, this issue was labeled with the "area: production", "area: production installer" labels, so you may want to check it out! <!-- areaLabelAddition -->

Hello @zulip/server-production members, this pull request was labeled with the "area: production", "area: production installer" labels, so you may want to check it out! <!-- areaLabelAddition -->

Posted one comment; can you also post example output?

This PR is broken, sorry.

@timabbott updated and this works now. Sample error output:

```
2020-04-22 17:59:23,483 upgrade-zulip-stage-2: Building static assets...
Using cached node modules from /srv/zulip-npm-cache/c18d6cee3155e6013068b5a7f785e30f09ca6a0a/node_modules
+ ./tools/setup/emoji/build_emoji
+ ./scripts/setup/inline_email_css.py
+ ./tools/setup/generate_zulip_bots_static_files.py
Error running a subcommand of ./tools/update-prod-static: ./tools/setup/generate_zulip_bots_static_files.py
Actual error output for the subcommand is just above this.
Traceback (most recent call last):
  File "./tools/update-prod-static", line 49, in <module>
    run(['./tools/setup/generate_zulip_bots_static_files.py'], stdout=fp, stderr=fp)
  File "./tools/../scripts/lib/zulip_tools.py", line 189, in run
    subprocess.check_call(args, **kwargs)
  File "/usr/lib/python3.6/subprocess.py", line 311, in check_call
    raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['./tools/setup/generate_zulip_bots_static_files.py']' returned non-zero exit status 1.
2020-04-22 17:59:25,233 upgrade-zulip-stage-2: Building static assets failed.
build_emoji: Using cached emojis from /srv/zulip-emoji-cache/933bee7b5c0c76b88258c010e6d2ef597f251825/emoji
Traceback (most recent call last):
  File "./tools/setup/generate_zulip_bots_static_files.py", line 10, in <module>
    assert 1==2
AssertionError
2020-04-22 17:59:25,234 upgrade-zulip-stage-2: Please check /home/zulip/deployments/next/var/log/update-prod-static.log for the complete error.
```

I was trying to get the error output to appear above the `Actual error output for the subcommand is just above this.` line, but that's tricky and not easy for me to do.

I think we probably want to just post the log file path and maybe its last ~50 lines at most; if `update-prod-static` fails halfway through the `webpack` stage, the `update-prod-static` output that you're pasting here will be like 50 screens long, which isn't a good experience.

Oh! Updated to just print the file path.

This pre-dates the PR, but I wonder if this would be nicer to read in `update-prod-static` (and then helpful for cross-checking against places where we document more manual installs for things like half-supported OSes):

~~~
tools = [
    './tools/setup/emoji/build_emoji',
    './scripts/setup/inline_email_css.py',
    './tools/setup/generate_zulip_bots_static_files.py',
    './tools/setup/build_pygments_data',
    './tools/webpack',
]
for tool in tools:
    run([tool], stdout=fp, stderr=fp)
~~~

Hmm, thinking about this more, I think we might want to try modifying `update-prod-static` so that only `manage.py collectstatic` doesn't get logged to stdout; that's rarely the place things fail, and I think it's also the only place that produces a million lines of output.

OK, I think with some small changes we can make the output like this, with everything but `manage.py collectstatic` going to stdout. And that bit basically never fails:

```
+ ./tools/update-prod-static
Using cached node modules from /srv/zulip-npm-cache/c18d6cee3155e6013068b5a7f785e30f09ca6a0a/node_modules
+ ./tools/setup/emoji/build_emoji
build_emoji: Using cached emojis from /srv/zulip-emoji-cache/2a8798306d68e75ed7d6e2f23f2376b0f048ec88/emoji
+ ./scripts/setup/inline_email_css.py
+ ./tools/setup/generate_zulip_bots_static_files.py
+ ./tools/setup/build_pygments_data
+ ./tools/webpack --quiet
Starting webpack compilation
+ ./manage.py collectstatic --no-default-ignore --noinput -i assets -i emoji-styles -i html -i js -i styles -i templates
+ ./manage.py compilemessages -v0
```
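Putting those suggestions together, the main loop of `update-prod-static` could look roughly like the sketch below (simplified tool list and flags, not the actual script): ordinary build steps write straight to stdout so failures are visible in the upgrade output, and only the very noisy `manage.py collectstatic` step is redirected to the log file.

```python
import subprocess

LOG_PATH = "var/log/update-prod-static.log"  # illustrative path

# Steps whose output is short and whose failures we want on the console.
build_steps = [
    ["./tools/setup/emoji/build_emoji"],
    ["./scripts/setup/inline_email_css.py"],
    ["./tools/setup/generate_zulip_bots_static_files.py"],
    ["./tools/setup/build_pygments_data"],
    ["./tools/webpack", "--quiet"],
]

for step in build_steps:
    subprocess.check_call(step)  # errors surface directly in the upgrade output

# collectstatic produces enormous output and rarely fails, so only this step
# goes to the log file.
with open(LOG_PATH, "a") as fp:
    subprocess.check_call(
        ["./manage.py", "collectstatic", "--noinput"], stdout=fp, stderr=fp)
```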
2020-04-24T20:32:13
zulip/zulip
14,767
zulip__zulip-14767
[ "14760" ]
9089fd5b08ec0b4b470df2336246e343754722d3
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -851,6 +851,10 @@ def do_deactivate_stream(stream: Stream, log: bool=True) -> None: if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists(): do_remove_default_stream(stream) + default_stream_groups_for_stream = DefaultStreamGroup.objects.filter(streams__id=stream.id) + for group in default_stream_groups_for_stream: + do_remove_streams_from_default_stream_group(stream.realm, group, [stream]) + # Remove the old stream information from remote cache. old_cache_key = get_stream_cache_key(old_name, stream.realm_id) cache_delete(old_cache_key)
diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py --- a/zerver/tests/test_subs.py +++ b/zerver/tests/test_subs.py @@ -456,6 +456,31 @@ def test_deactivate_stream_removes_default_stream(self) -> None: do_deactivate_stream(stream) self.assertEqual(0, DefaultStream.objects.filter(stream_id=stream.id).count()) + def test_deactivate_stream_removes_stream_from_default_stream_groups(self) -> None: + realm = get_realm('zulip') + streams_to_keep = [] + for stream_name in ["stream1", "stream2"]: + stream = ensure_stream(realm, stream_name) + streams_to_keep.append(stream) + + streams_to_remove = [] + stream = ensure_stream(realm, "stream3") + streams_to_remove.append(stream) + + all_streams = streams_to_keep + streams_to_remove + + def get_streams(group: DefaultStreamGroup) -> List[Stream]: + return list(group.streams.all().order_by('name')) + + group_name = "group1" + description = "This is group1" + do_create_default_stream_group(realm, group_name, description, all_streams) + default_stream_groups = get_default_stream_groups(realm) + self.assertEqual(get_streams(default_stream_groups[0]), all_streams) + + do_deactivate_stream(streams_to_remove[0]) + self.assertEqual(get_streams(default_stream_groups[0]), streams_to_keep) + def test_vacate_private_stream_removes_default_stream(self) -> None: stream = self.make_stream('new_stream', invite_only=True) self.subscribe(self.example_user("hamlet"), stream.name)
Remove stream from DefaultStreamGroup if the stream is deactivated

When we deactivate a stream that is part of an existing DefaultStreamGroup, the stream is not removed from that default stream group. This is a bug; the stream should be removed from any default stream groups when it is deactivated.
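The cleanup the fix adds to `do_deactivate_stream` boils down to the following; a standalone sketch using the model and helper names from the diff above (import paths are assumed):

```python
from zerver.lib.actions import do_remove_streams_from_default_stream_group
from zerver.models import DefaultStreamGroup, Stream


def remove_from_default_stream_groups(stream: Stream) -> None:
    # Walk every default stream group that still references the stream and
    # drop it, reusing the existing helper so the usual realm event is sent.
    for group in DefaultStreamGroup.objects.filter(streams__id=stream.id):
        do_remove_streams_from_default_stream_group(stream.realm, group, [stream])
```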
@zulipbot claim
2020-04-26T19:57:28
zulip/zulip
14,899
zulip__zulip-14899
[ "14166" ]
a702894e0e70c91d172941c346b2cf5e56413199
diff --git a/version.py b/version.py --- a/version.py +++ b/version.py @@ -29,7 +29,7 @@ # # Changes should be accompanied by documentation explaining what the # new level means in templates/zerver/api/changelog.md. -API_FEATURE_LEVEL = 3 +API_FEATURE_LEVEL = 4 # Bump the minor PROVISION_VERSION to indicate that folks should provision # only when going from an old version of the code to a newer version. Bump diff --git a/zerver/lib/events.py b/zerver/lib/events.py --- a/zerver/lib/events.py +++ b/zerver/lib/events.py @@ -170,6 +170,17 @@ def fetch_initial_state_data(user_profile: UserProfile, state['plan_includes_wide_organization_logo'] = realm.plan_type != Realm.LIMITED state['upgrade_text_for_wide_organization_logo'] = str(Realm.UPGRADE_TEXT_STANDARD) state['realm_default_external_accounts'] = DEFAULT_EXTERNAL_ACCOUNTS + state['jitsi_server_url'] = settings.JITSI_SERVER_URL + state['development_environment'] = settings.DEVELOPMENT + state['server_generation'] = settings.SERVER_GENERATION + state['password_min_length'] = settings.PASSWORD_MIN_LENGTH + state['password_min_guesses'] = settings.PASSWORD_MIN_GUESSES + state['max_file_upload_size_mib'] = settings.MAX_FILE_UPLOAD_SIZE + state['max_avatar_file_size_mib'] = settings.MAX_AVATAR_FILE_SIZE + state['server_inline_image_preview'] = settings.INLINE_IMAGE_PREVIEW + state['server_inline_url_embed_preview'] = settings.INLINE_URL_EMBED_PREVIEW + state['server_avatar_changes_disabled'] = settings.AVATAR_CHANGES_DISABLED + state['server_name_changes_disabled'] = settings.NAME_CHANGES_DISABLED if realm.notifications_stream and not realm.notifications_stream.deactivated: notifications_stream = realm.notifications_stream diff --git a/zerver/views/home.py b/zerver/views/home.py --- a/zerver/views/home.py +++ b/zerver/views/home.py @@ -246,26 +246,15 @@ def home_real(request: HttpRequest) -> HttpResponse: # These end up in a global JavaScript Object named 'page_params'. page_params = dict( # Server settings. - development_environment = settings.DEVELOPMENT, - debug_mode = settings.DEBUG, - test_suite = settings.TEST_SUITE, - poll_timeout = settings.POLL_TIMEOUT, - insecure_desktop_app = insecure_desktop_app, - login_page = settings.HOME_NOT_LOGGED_IN, - root_domain_uri = settings.ROOT_DOMAIN_URI, - max_file_upload_size = settings.MAX_FILE_UPLOAD_SIZE, - max_avatar_file_size = settings.MAX_AVATAR_FILE_SIZE, - server_generation = settings.SERVER_GENERATION, - save_stacktraces = settings.SAVE_FRONTEND_STACKTRACES, - warn_no_email = settings.WARN_NO_EMAIL, - server_inline_image_preview = settings.INLINE_IMAGE_PREVIEW, - server_inline_url_embed_preview = settings.INLINE_URL_EMBED_PREVIEW, - password_min_length = settings.PASSWORD_MIN_LENGTH, - password_min_guesses = settings.PASSWORD_MIN_GUESSES, - jitsi_server_url = settings.JITSI_SERVER_URL, - search_pills_enabled = settings.SEARCH_PILLS_ENABLED, - server_avatar_changes_disabled = settings.AVATAR_CHANGES_DISABLED, - server_name_changes_disabled = settings.NAME_CHANGES_DISABLED, + debug_mode = settings.DEBUG, + test_suite = settings.TEST_SUITE, + poll_timeout = settings.POLL_TIMEOUT, + insecure_desktop_app = insecure_desktop_app, + login_page = settings.HOME_NOT_LOGGED_IN, + root_domain_uri = settings.ROOT_DOMAIN_URI, + save_stacktraces = settings.SAVE_FRONTEND_STACKTRACES, + warn_no_email = settings.WARN_NO_EMAIL, + search_pills_enabled = settings.SEARCH_PILLS_ENABLED, # Misc. extra data. 
have_initial_messages = user_has_messages, @@ -358,7 +347,7 @@ def home_real(request: HttpRequest) -> HttpResponse: 'show_webathena': show_webathena, 'embedded': narrow_stream is not None, 'invite_as': PreregistrationUser.INVITE_AS, - 'max_file_upload_size': settings.MAX_FILE_UPLOAD_SIZE, + 'max_file_upload_size_mib': settings.MAX_FILE_UPLOAD_SIZE, },) patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True) return response
diff --git a/frontend_tests/node_tests/compose.js b/frontend_tests/node_tests/compose.js --- a/frontend_tests/node_tests/compose.js +++ b/frontend_tests/node_tests/compose.js @@ -922,7 +922,7 @@ run_test('initialize', () => { global.document = 'document-stub'; global.csrf_token = 'fake-csrf-token'; - page_params.max_file_upload_size = 512; + page_params.max_file_upload_size_mib = 512; let setup_upload_called = false; let uppy_cancel_all_called = false; diff --git a/frontend_tests/node_tests/upload.js b/frontend_tests/node_tests/upload.js --- a/frontend_tests/node_tests/upload.js +++ b/frontend_tests/node_tests/upload.js @@ -176,7 +176,7 @@ run_test('upload_files', () => { upload.upload_files(uppy, config, []); assert.equal($("#compose-send-button").attr("disabled"), false); - page_params.max_file_upload_size = 0; + page_params.max_file_upload_size_mib = 0; let show_error_message_called = false; upload.show_error_message = (config, message) => { show_error_message_called = true; @@ -186,7 +186,7 @@ run_test('upload_files', () => { upload.upload_files(uppy, config, files); assert(show_error_message_called); - page_params.max_file_upload_size = 25; + page_params.max_file_upload_size_mib = 25; let on_click_close_button_callback; $(".compose-send-status-close").one = (event, callback) => { assert.equal(event, "click"); diff --git a/zerver/tests/test_home.py b/zerver/tests/test_home.py --- a/zerver/tests/test_home.py +++ b/zerver/tests/test_home.py @@ -100,8 +100,8 @@ def test_home(self) -> None: "last_event_id", "left_side_userlist", "login_page", - "max_avatar_file_size", - "max_file_upload_size", + "max_avatar_file_size_mib", + "max_file_upload_size_mib", "max_icon_file_size", "max_logo_file_size", "max_message_id",
Make hard-coded `jitsi_server_url` and friends accessible by `/register`

In the [Events subsystem doc](https://zulip.readthedocs.io/en/latest/subsystems/events-system.html), `page_params` is documented as the way to access data provided in the `/register` response. I learned recently (@showell points out [here](https://chat.zulip.org/#narrow/stream/127-integrations/topic/Jitsi.20Videocalls/near/829298)) that some fields on `page_params` do not come from the `/register` response. `page_params` is initialized with data from the server settings via the template at templates/zerver/base.html (the fields are specified at line ~225 of zerver/views/home.py), so these fields are essentially hard-coded for use by the web app only.

At least a few of these will be nice to have for the mobile app. I consider https://github.com/zulip/zulip-mobile/issues/3546 to be blocked on this, since `jitsi_server_url` is one of these, but there are other ones that it seems helpful for mobile to be aware of, such as `max_file_upload_size` (I don't think we're currently limiting uploads by file size). If some of these could instead be made available through `/register`, the mobile app could easily consume them. (Perhaps they can all be treated this way, but I don't know the intricacies of each.)
Yeah, the intent is to move all of the ones that are not webapp-specific to the `realm` section of the `/register` response; we did most of them a couple years ago and then forgot to finish, I think. It's a very simple change to move one of these; we should probably just figure out the full list we want to move and move them all. Some, like `TEST_SUITE`, are likely meaningless; let me see if I can put together a proposal.

One thing to discuss is naming; we possibly want to name these as e.g. `settings_root_domain_uri` or `server_root_domain_uri` to distinguish them. And also what `event_types` one needs to request to get them; by default I'd do `realm`. But here's my summary.

These seem useful and could be migrated over modulo potentially adding a naming prefix:

    development_environment = settings.DEVELOPMENT,
    max_file_upload_size = settings.MAX_FILE_UPLOAD_SIZE,
    max_avatar_file_size = settings.MAX_AVATAR_FILE_SIZE,
    server_generation = settings.SERVER_GENERATION,
    server_inline_image_preview = settings.INLINE_IMAGE_PREVIEW,
    server_inline_url_embed_preview = settings.INLINE_URL_EMBED_PREVIEW,
    password_min_length = settings.PASSWORD_MIN_LENGTH,
    password_min_guesses = settings.PASSWORD_MIN_GUESSES,
    jitsi_server_url = settings.JITSI_SERVER_URL,
    server_avatar_changes_disabled = settings.AVATAR_CHANGES_DISABLED,
    server_name_changes_disabled = settings.NAME_CHANGES_DISABLED,

This is for casper tests or the web UI only:

    test_suite = settings.TEST_SUITE,
    insecure_desktop_app = is_outdated_desktop_app(request.META.get("HTTP_USER_AGENT", "")),
    login_page = settings.HOME_NOT_LOGGED_IN,
    save_stacktraces = settings.SAVE_FRONTEND_STACKTRACES,
    warn_no_email = settings.WARN_NO_EMAIL,
    search_pills_enabled = settings.SEARCH_PILLS_ENABLED,

These are unused and should perhaps be removed:

    has_mobile_devices = user_profile is not None and num_push_devices_for_user(user_profile) > 0,
    prompt_for_invites = prompt_for_invites,
    root_domain_uri = settings.ROOT_DOMAIN_URI,

These are data about the set of options used for user/organization settings. They're things we ultimately want to be part of the API, but perhaps don't make sense to move now as they may not be in a reusable form.

    bot_types = get_bot_types(user_profile),
    default_language_name = get_language_name(register_ret['default_language']),
    language_list = get_language_list(),

This may be in too specific a format to be reusable? Needs checking.

    language_list_dbl_col = get_language_list_for_templates(register_ret['default_language']),

These seem webapp-specific and/or things we might remove/refactor before reusing:

    needs_tutorial = needs_tutorial,
    two_fa_enabled = two_fa_enabled,
    two_fa_enabled_user = two_fa_enabled and bool(default_device(user_profile)),
    # Actually is whether the realm has only one user.
    first_in_realm = first_in_realm,
    initial_servertime = time.time(),  # Used for calculating relative presence age

I think there might be an active PR that removes these:

    have_initial_messages = user_has_messages,
    furthest_read_time = furthest_read_time,

I'm not sure what the story with this is, but I think it's mostly unused:

    poll_timeout = settings.POLL_TIMEOUT,

Thanks, Tim! After having posted this, I remembered that `/server_settings` exists ([doc](https://zulipchat.com/api/server-settings)); I'd been using that recently for the zulip_version in https://github.com/zulip/zulip-mobile/pull/3839. Might some of these belong there, rather than in the /register response?

I think basically no. `/server_settings` is intended to be just the information clients need to determine how to connect to and authenticate with a Zulip server; I think we want to keep that interface at the minimum required to achieve that goal. Further, I think it should be OK for an already-authenticated client to only fetch `/server_settings` when it wants to authenticate and setup an API key; any details that are important after that point should ideally be included in the `/register` response, which we expect clients to request more frequently in any plausible design.

Thanks! For specific naming choices, Tim started [a chat thread](https://chat.zulip.org/#narrow/stream/3-backend/topic/.2Fregister.20interface/near/830111); good idea. I replied there.

> I think it should be OK for an already-authenticated client to only fetch `/server_settings` when it wants to authenticate and setup an API key; any details that are important after that point should ideally be included in the `/register` response, which we expect clients to request more frequently in any plausible design.

This discussion gives me the thought that perhaps `zulip_version` should appear in the `/register` response in addition to `/server_settings`. In https://github.com/zulip/zulip-mobile/pull/3839 we started making a `/server_settings` request concurrently with every `/register`, for basically the same reason you might for something like `jitsi_server_url` -- it may change from time to time. If `/register` had that data, we could go back to the behavior you describe. (... Heh, only for new servers, I guess. But in the long run that's all servers.)

(It still needs to be in `/server_settings`, because it can potentially matter already while trying to log in.)

Following that chat thread, here's a set of names for the first wave of these.

```
max_file_upload_size = settings.MAX_FILE_UPLOAD_SIZE,
max_avatar_file_size = settings.MAX_AVATAR_FILE_SIZE,
```

These two should gain a suffix `_mib` to clarify the units. (The docs say "megabytes" or "MB", but client software needs to know the actual meaning, which is in mebibytes.)

```
development_environment = settings.DEVELOPMENT,
server_generation = settings.SERVER_GENERATION,
password_min_length = settings.PASSWORD_MIN_LENGTH,
password_min_guesses = settings.PASSWORD_MIN_GUESSES,
jitsi_server_url = settings.JITSI_SERVER_URL,
server_inline_image_preview = settings.INLINE_IMAGE_PREVIEW,
server_inline_url_embed_preview = settings.INLINE_URL_EMBED_PREVIEW,
server_avatar_changes_disabled = settings.AVATAR_CHANGES_DISABLED,
server_name_changes_disabled = settings.NAME_CHANGES_DISABLED,
```

These other names are good, and can go into `/register` with the same name.

---

Then, a note on API naming choices, which is background for some of the choices above:

```
max_file_upload_size = settings.MAX_FILE_UPLOAD_SIZE,
max_avatar_file_size = settings.MAX_AVATAR_FILE_SIZE,
password_min_length = settings.PASSWORD_MIN_LENGTH,
password_min_guesses = settings.PASSWORD_MIN_GUESSES,
jitsi_server_url = settings.JITSI_SERVER_URL,
```

These are all pieces of server-level configuration, where in the future we potentially might (more likely in some cases than others) add realm/org-level settings to control them. The server-level setting would then be a default, or a maximum or minimum, for the realm/org-level setting. If we do, the intended semantics is that these existing names continue to refer to *the setting that's actually in effect*.

In particular, except for the org settings UI itself, any time a client might care about the value of these settings, it should be able to simply keep consulting these names. For example, the `Realm` model might gain a setting for `password_min_length`. The server would then enforce that a chosen password's length is at least

```
max(settings.PASSWORD_MIN_LENGTH, realm.password_min_length)
```

and it would accordingly provide the client that value, like

```
password_min_length = max(settings.PASSWORD_MIN_LENGTH, realm.password_min_length),
```

Separately, for the benefit of specifically the org settings UI, we'd add a field to tell that UI what limits it should apply. This might look like

```
server_password_min_length = settings.PASSWORD_MIN_LENGTH,
```

This plan is different from what we've done with some settings in the past: instead of `foo` for the setting that's in effect and `server_foo` for the server-level limits relevant to the org settings UI, we've had `realm_foo` for the setting chosen by org admins and `server_foo` for the server-level setting. Two reasons we prefer the new plan are:

* The old pattern tends to require client code to manually combine the two settings, duplicating logic in the server.
* The old pattern means the name of the setting, and/or the client logic for applying it, changes when we add an org-level setting for an existing server-level setting. With the new pattern, the only client-side change needed is to implement the new fragment of org settings UI for the new org-level setting. In particular, clients that don't have an org settings UI don't have to change at all.
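A compact sketch of that contract (the realm-level override is hypothetical today; only the server-level setting exists, so the function and field names here are illustrative):

```python
from typing import Any, Dict


def register_password_fields(server_min_length: int,
                             realm_min_length: int = 0) -> Dict[str, Any]:
    return {
        # The value every client should enforce -- already combined server-side.
        "password_min_length": max(server_min_length, realm_min_length),
        # Only the org settings UI needs the raw server-imposed floor.
        "server_password_min_length": server_min_length,
    }


# A client then validates against the effective value only, e.g.:
# ok = len(new_password) >= register_response["password_min_length"]
```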
2020-05-08T08:57:38
zulip/zulip
14,924
zulip__zulip-14924
[ "14863" ]
60a762704a78de8db3e4219c8ecc7466a54db71e
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -667,7 +667,7 @@ def is_image(self, url: str) -> bool: return False # List from https://support.google.com/chromeos/bin/answer.py?hl=en&answer=183093 - for ext in [".bmp", ".gif", ".jpg", "jpeg", ".png", ".webp"]: + for ext in [".bmp", ".gif", ".jpe", "jpeg", ".jpg", ".png", ".webp"]: if parsed_url.path.lower().endswith(ext): return True return False
JPEG images with suffix .jpe are not shown as an image

When copying an image on iOS (photo library) and pasting it into Zulip running in Safari on a Mac (Universal Clipboard feature), Zulip detects that it is a graphic, uploads the file, and generates a reference like `[pasted image](https://server.example.com/user_uploads/hash/pasted_image.jpe)`.

The file has a .jpe ending, which is probably not the most standard, but given that it says ‘pasted image’, I assume that some part of Zulip realised that this is an image. However, this Markdown is then only converted into a link; the image itself is never embedded. To work around the problem, I have to download the file again on the Mac, change the suffix, and upload it again. It would be nicer if there were a way to force the Markdown to render it as an image.
I bet we just need to add that extension to our lists of image extensions. Thanks for the report!

Hello @zulip/server-misc members, this issue was labeled with the "area: uploads" label, so you may want to check it out! <!-- areaLabelAddition -->

Places to change likely include:

* `zerver/lib/bugdown/__init__.py`
* `puppet/zulip/files/nginx/zulip-include-frontend/uploads.types`
* `zerver/lib/upload.py`
* Various other things found via `git grep --files-with-match jpeg | xargs grep gif`
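For the Markdown side specifically, the check amounts to adding `.jpe` to the allowed suffixes; a simplified standalone sketch of the `is_image` test (in the real code this is a method on a bugdown preprocessor class, and the exact extension list differs slightly):

```python
from urllib.parse import urlparse

# JPEG files can also arrive with the less common .jpe suffix.
IMAGE_EXTENSIONS = (".bmp", ".gif", ".jpe", ".jpeg", ".jpg", ".png", ".webp")


def is_image(url: str) -> bool:
    path = urlparse(url).path.lower()
    return any(path.endswith(ext) for ext in IMAGE_EXTENSIONS)
```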
2020-05-09T19:15:17